id (string, length 1-4)
tokens (sequence)
ner_tags (sequence)
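Each record below stores the three fields above in order: an id, a tokens array, and a ner_tags array of the same length, one tag id per token. A minimal sketch of consuming one such record, assuming the rows are available as plain Python dicts (the helper name pair_tokens_with_tags and the variable names are illustrative, not part of the dataset):

def pair_tokens_with_tags(record):
    # tokens and ner_tags are parallel sequences; one tag id per token.
    tokens, tags = record["tokens"], record["ner_tags"]
    assert len(tokens) == len(tags), "tokens and ner_tags must be the same length"
    return list(zip(tokens, tags))

# Example record copied from row 635 of this dump.
example = {
    "id": "635",
    "tokens": ["4", "Experiments"],
    "ner_tags": [0, 0],
}

print(pair_tokens_with_tags(example))  # [('4', 0), ('Experiments', 0)]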
600
[ "To", "demonstrate", "the", "effectiveness", "of", "SSEGCN,", "we", "compare", "our", "model", "with", "previous", "works", "using", "accuracy", "and", "macro-averaged", "F1", "as", "evaluation", "metrics,", "and", "report", "results", "in", "Table", "2." ]
[ 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 7, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
601
[ "4.4", "Main", "Results" ]
[ 0, 0, 0 ]
602
[ "16)", "T-GCN", "(Tian", "et", "al.,", "2021)", "utilizes", "dependency", "types", "to", "distinguish", "different", "relations", "in", "the", "graph", "and", "uses", "attentive", "layer", "ensemble", "to", "learn", "the", "contextual", "information", "from", "different", "GCN", "layers." ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
603
[ "15)", "BERT4GCN", "(Zhang", "and", "Qian,", "2020)", "integrates", "the", "grammatical", "sequential", "features", "from", "the", "PLM", "of", "BERT", "and", "the", "syntactic", "knowledge", "from", "dependency", "graphs." ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
604
[ "model", "based", "on", "pre-trained", "BERT." ]
[ 0, 0, 0, 0, 0 ]
605
[ "14)", "DGEDT+BERT", "(Li", "et", "al.,", "2021)", "is", "the", "DGEDT" ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0 ]
606
[ "13)", "R-GAT+BERT", "(Wang", "et", "al.,", "2020)", "is", "the", "RGAT", "model", "based", "on", "pre-trained", "BERT." ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
607
[ "12)", "BERT", "(Devlin", "et", "al.,", "2019)", "is", "the", "vanilla", "BERT", "model,", "which", "adopts", "“[CLS]", "sentence", "[SEP]", "aspect", "[SEP]”", "as", "input." ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
608
[ "11)", "DualGCN", "(Li", "et", "al.,", "2021)", "designs", "a", "SynGCN", "module", "and", "a", "SemGCN", "module", "with", "orthogonal", "and", "differential", "regularizers." ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
609
[ "10)", "DGEDT", "(Tang", "et", "al.,", "2020)", "proposes", "a", "dependency", "graph", "enhanced", "dual-transformer", "network", "by", "jointly", "considering", "the", "flat", "representations", "from", "Transformer", "and", "graph-based", "representations", "from", "the", "dependency", "graph." ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
610
[ "9)", "R-GAT", "(Wang", "et", "al.,", "2020)", "proposes", "a", "relational", "graph", "attention", "network", "to", "encode", "the", "new", "tree", "reshaped", "by", "an", "ordinary", "dependency", "parse", "tree." ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
611
[ "8)", "kumaGCN", "(Chen", "et", "al.,", "2020)", "combines", "information", "from", "a", "dependency", "graph", "and", "a", "latent", "graph", "to", "learn", "syntactic", "features." ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
612
[ "7)", "BiGCN", "(Zhang", "and", "Qian,", "2020)", "builds", "a", "concept", "hierarchy", "on", "both", "the", "syntactic", "and", "lexical", "graphs", "for", "sentiment", "prediction." ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
613
[ "6)", "TD-GAT", "(Huang", "and", "Carley,", "2019)", "proposes", "a", "target-dependent", "graph", "attention", "network", "for", "aspect", "level", "sentiment", "classification,", "which", "explicitly", "utilizes", "the", "dependency", "relationship", "among", "words." ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
614
[ "5)", "CDT", "(Sun", "et", "al.,", "2019)", "utilizes", "a", "convolution", "over", "a", "dependency", "tree", "model", "to", "learn", "the", "representations", "of", "sentence", "features." ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
615
[ "4)", "ASGCN", "(Zhang", "et", "al.,", "2019)", "proposes", "to", "build", "GCN", "to", "learn", "syntactical", "information", "and", "word", "dependencies", "for", "ABSA." ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11 ]
616
[ "3)", "TNet", "(Li", "et", "al.,", "2018)", "employs", "a", "CNN", "model", "to", "extract", "salient", "features", "from", "target-specific", "embeddings", "by", "transformed", "BiLSTM", "embeddings." ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
617
[ "2)", "RAM", "(Chen", "et", "al.,", "2017)", "proposes", "a", "recurrent", "attention", "memory", "network", "to", "learn", "the", "sentence", "representation." ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
618
[ "1)", "IAN", "(Ma", "et", "al.,", "2017)", "interactively", "learns", "the", "relationship", "between", "aspect", "and", "their", "context." ]
[ 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
619
[ "To", "comprehensively", "evaluate", "the", "performance", "of", "our", "model,", "we", "compare", "with", "state-of-the-art", "baselines:" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
620
[ "4.3", "Baseline", "Comparisons" ]
[ 0, 0, 0 ]
621
[ "For", "SSEGCN+BERT,", "we", "employ", "the", "bert-base-uncased3", "English", "version." ]
[ 0, 1, 0, 0, 0, 0, 0, 0 ]
622
[ "Our", "model", "is", "trained", "using", "the", "Adam", "optimizer", "with", "a", "learning", "rate", "of", "0.002", "to", "optimize", "the", "parameters." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, 0, 5, 0, 0, 0, 0 ]
623
[ "Besides,", "dropout", "function", "is", "applied", "to", "the", "input", "word", "representations", "of", "the", "BiLSTM", "and", "the", "dropout", "rate", "is", "set", "as", "0.3." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, 0, 0, 0, 5 ]
624
[ "The", "batch", "size", "of", "all", "model", "is", "set", "as", "16", "and", "the", "number", "of", "GCN", "layers", "is", "2." ]
[ 0, 3, 4, 0, 0, 0, 0, 0, 0, 5, 0, 0, 3, 4, 4, 4, 0, 5 ]
625
[ "All", "sentences", "are", "parsed", "by", "the", "Stanford", "parser2." ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
626
[ "Then,", "word", "embeddings,", "POS", "embeddings", "and", "position", "embeddings", "are", "concatenated", "as", "input", "word", "representations." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
627
[ "Additionally,", "we", "use", "30-dimensional", "Part-of-Speech", "(POS)", "embeddings", "and", "30-dimensional", "position", "embeddings", "which", "is", "the", "relative", "position", "of", "each", "word", "with", "respect", "to", "the", "aspect", "term", "in", "the", "sentence." ]
[ 0, 0, 0, 5, 1, 2, 2, 0, 5, 3, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
628
[ "For", "our", "experiments,", "we", "initialize", "word", "embeddings", "with", "300-dimensional", "Glove", "vectors", "provided", "by", "Pennington", "et", "al.", "(2014)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0 ]
629
[ "Implementation", "Details" ]
[ 0, 0 ]
630
[ "4.2" ]
[ 0 ]
631
[ "The", "statistics", "for", "three", "datasets", "are", "reported", "in", "Table", "1." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
632
[ "Each", "aspect", "is", "labeled", "by", "one", "of", "the", "three", "sentiment", "polarities:", "positive,", "neutral", "and", "negative." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
633
[ "We", "conduct", "experiments", "on", "three", "benchmark", "datasets", "for", "aspect-based", "sentiment", "analysis,", "including", "Restaurant", "and", "Laptop", "reviews", "from", "SemEval", "2014", "Task", "4", "(Pontiki", "et", "al.,", "2014)", "and", "Twitter", "(twitter", "posts)", "from", "Dong", "et", "al.", "(2014)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 14, 14, 14, 0, 0, 0, 0, 0, 13, 0, 0, 0, 0, 0, 0, 0 ]
634
[ "4.1", "Datasets" ]
[ 0, 0 ]
635
[ "4", "Experiments" ]
[ 0, 0 ]
636
[ "contains", "all", "the", "sentence-aspect", "pairs", "and", "where", "a", "represents", "the", "aspect", "appearing", "in", "sentence", "s.", "θ", "represents", "all", "the", "trainable", "parameters", "and", "is", "the", "collection", "of", "sentiment", "polarities." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
637
[ "Finally,", "the", "standard", "cross-entropy", "loss", "is", "used", "as", "our", "objective", "function:" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
638
[ "3.5", "Training" ]
[ 0, 0 ]
639
[ "where", "Wp", "and", "bp", "are", "the", "learnable", "weight", "and", "bias." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
640
[ "Moreover,", "an", "average", "pooling", "to", "retain", "most", "of", "the", "information", "in", "the", "aspect", "term", "representation", "hl", "a." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
641
[ "We", "mask", "the", "non-aspect", "words", "of", "the", "output", "representation", "learned", "by", "the", "GCN", "layer", "to", "obtain", "aspect", "term", "representation." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
642
[ "After", "aggregating", "node", "representation", "from", "each", "layer", "of", "SSEGCN,", "we", "obtain", "the", "final", "feature", "representation." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0 ]
643
[ "The", "final", "output", "representation", "of", "the", "l-layer", "GCN", "is", "H", "l", "=" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
644
[ "where", "W", "l", "is", "a", "linear", "transformation", "weight,", "bl", "is", "a", "bias", "term,", "and", "σ", "is", "a", "nonlinear", "function." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
645
[ "Each", "node", "in", "the", "l-th", "GCN", "layer", "is", "updated", "according", "to", "the", "hidden", "representations", "of", "its", "neighborhoods:" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
646
[ "Since", "we", "have", "p", "different", "syntactic", "mask", "matrices,", "p", "graph", "convolution", "operations", "over", "Amask", "∈", "1", "as", "the", "inRp", "×", "put", "state", "and", "hl", "as", "the", "output", "state", "of", "the", "l-th", "layer,", "h0", "is", "the", "output", "of", "sentence", "encoding", "layer." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
647
[ "3.4", "GCN", "Layer" ]
[ 0, 0, 0 ]
648
[ "To", "obtain", "global", "information", "and", "local", "feature,", "attention", "scopes", "are", "restricted", "by", "different", "syntactic", "distances." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
649
[ "The", "calculation", "of", "syntactic", "mask", "matrix", "M", "k", "with", "threshold", "k", "can", "be", "formulated", "as:" ]
[ 0, 0, 0, 0, 0, 0, 0, 3, 0, 3, 3, 0, 0, 0, 0 ]
650
[ "When", "syntactic", "distance", "is", "relatively", "small,", "our", "model", "can", "learn", "local", "information;", "on", "the", "contrary,", "if", "syntactic", "distance", "is", "relatively", "large,", "global", "structure", "information", "will", "be", "considered." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
651
[ "Therefore,", "we", "set", "the", "number", "of", "syntactic", "mask", "matrices", "based", "on", "different", "syntactic", "distances", "as", "the", "same", "as", "the", "number", "of", "attention", "heads." ]
[ 0, 0, 0, 0, 3, 4, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, 4, 4 ]
652
[ "In", "the", "previous", "part,", "the", "p-head", "attention", "mechanism", "can", "obtain", "p", "adjacency", "matrices." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
653
[ "(4)" ]
[ 0 ]
654
[ "D(i,", "j)", "=", "min", "d(vi,", "vj)" ]
[ 0, 0, 0, 0, 0, 0 ]
655
[ "Since", "there", "are", "multiple", "paths", "between", "nodes", "on", "the", "syntactic", "dependency", "tree,", "we", "define", "the", "distance", "of", "the", "shortest", "path", "as", "D:" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
656
[ "Then,", "we", "define", "the", "distance", "between", "node", "vi", "and", "vj", "as", "d(vi,", "vj)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
657
[ "We", "treat", "the", "syntactic", "dependency", "tree", "as", "an", "undirected", "graph,", "and", "each", "token", "as", "a", "node." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
658
[ "In", "this", "section,", "we", "first", "introduce", "the", "syntactic", "mask", "matrix,", "and", "then", "mask", "each", "fully", "connected", "graph", "in", "terms", "of", "different", "syntactic", "distances." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
659
[ "3.3", "Syntax-Mask", "Layer" ]
[ 0, 0, 0 ]
660
[ "For", "each", "Ai,", "it", "represents", "a", "fully", "connected", "graph." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
661
[ "n", "is", "used", "as", "the", "input", "for", "the", "comwhere", "Ai", "putation", "of", "the", "later", "Syntax-Mask", "Layer." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
662
[ "d", "and", "W", "K" ]
[ 0, 0, 0, 0 ]
663
[ "Then,", "we", "integrate", "aspect-aware", "attention", "score", "with", "self-attention", "score:" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
664
[ "W", "Q", "d", "are", "Rd", "×", "learnable", "weights." ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
665
[ "where", "Q", "and", "K", "are", "both", "equal", "to", "H", "produced", "by", "encoding", "layer." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
666
[ "The", "calculation", "involves", "a", "query", "and", "a", "key:" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
667
[ "3.2.2", "Self-Attention", "Similarly,", "here", "Aself", "can", "be", "constructed", "by", "utilizing", "p-head", "self-attention", "(Vaswani", "et", "al.,", "2017)", "that", "captures", "the", "interaction", "between", "two", "arbitrary", "words", "of", "a", "single", "sentence." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
668
[ "We", "propose", "the", "aspect-aware", "attention", "mechanism,", "which", "regards", "aspect", "term", "as", "query", "to", "attention", "calculation", "for", "learning", "aspect-related", "features:" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
669
[ "3.2.1", "Aspect-aware", "Attention", "Unlike", "sentence", "level", "sentiment", "classification", "task,", "aspect-based", "sentiment", "classification", "aims", "at", "judging", "sentiments", "of", "one", "specific", "aspect", "term", "in", "its", "context", "sentence,", "and", "thus", "calls", "for", "modeling", "particular", "semantic", "correlation", "based", "on", "different", "aspect", "terms." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
670
[ "Here,", "we", "construct", "p", "matrices", "and", "the", "p", "is", "a", "hyper-parameter." ]
[ 0, 0, 0, 3, 0, 0, 0, 3, 0, 0, 0 ]
671
[ "Figure", "2", "shows", "multiple", "attention", "adjacency", "matrices." ]
[ 0, 0, 0, 0, 0, 0, 0 ]
672
[ "In", "this", "subsection,", "we", "combine", "aspect-aware", "attention", "and", "self-attention", "for", "better", "semantic", "features." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
673
[ "Attention", "mechanism", "is", "a", "common", "way", "to", "capture", "the", "interactions", "between", "the", "aspect", "and", "context", "words", "(Fan", "et", "al.,", "2018)." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
674
[ "3.2", "Attention", "Layer" ]
[ 0, 0, 0 ]
675
[ "For", "the", "BERT", "encoder,", "we", "adopt", "“[CLS]", "sentence", "[SEP]", "aspect", "[SEP]”", "as", "input." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
676
[ "Take", "H", "as", "initial", "nodes", "representation", "in", "SSEGCN." ]
[ 0, 0, 0, 0, 0, 0, 0, 1 ]
677
[ "With", "the", "word", "embeddings", "of", "the", "sentence,", "BiLSTM", "is", "leveraged", "to", "produce", "hidden", "state", "vectors", "R2d." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
678
[ "Thus,", "the", "sentence", "s", "has", "corresponding", "word", "embeddings", "x", "=", "x1,", "x2,", "...,", "xn}", "." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
679
[ "We", "first", "map", "each", "word", "into", "a", "low-dimensional", "real-value", "vector", "with", "embedding", "de,", "where", "matrix", "E", "is", "the", "size", "of", "vocabulary", "and", "de", "denotes", "the", "dimensionality", "of", "word", "embeddings." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
680
[ "We", "utilize", "BiLSTM", "or", "BERT", "(Devlin", "et", "al.,", "2019)", "as", "sentence", "encoder", "to", "extract", "hidden", "contextual", "representations." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
681
[ "Given", "a", "sentence-aspect", "pair", "(s,", "a),", "where", "s", "=", "a1,", "a2,", "...,", "am}", "w1,", "w2,", "...,", "wn}", "is", "an", "as{", "{", "pect", "and", "also", "a", "sub-sequence", "of", "the", "sentence", "s." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
682
[ "3.1" ]
[ 0 ]
683
[ "Next,", "components", "of", "SSEGCN", "will", "be", "introduced", "separately", "in", "the", "rest", "of", "the", "sections." ]
[ 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
684
[ "In", "this", "section,", "we", "describe", "the", "SSEGCN", "model", "which", "is", "mainly", "composed", "of", "three", "components:", "the", "Input", "and", "Encoding", "Layer,", "the", "Attention", "Layer,", "the", "Syntax-Mask", "Layer", "and", "the", "GCN", "Layer." ]
[ 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
685
[ "Figure", "2", "gives", "an", "overview", "of", "SSEGCN." ]
[ 0, 0, 0, 0, 0, 0, 1 ]
686
[ "3", "Proposed", "SSEGCN" ]
[ 0, 0, 1 ]
687
[ "However,", "these", "approaches", "generally", "ignore", "the", "effective", "fusion", "of", "syntactic", "structure", "and", "semantic", "correlation", "to", "obtain", "richer", "information." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
688
[ "Tian", "et", "al.", "(2021)", "utilized", "dependency", "types", "and", "distinguished", "different", "relations", "in", "the", "dependency", "tree." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
689
[ "Liang", "et", "al.", "(2021)", "constructed", "a", "sentiment", "enhancement", "graph", "by", "integrating", "the", "sentiment", "knowledge", "from", "SenticNet", "to", "consider", "the", "affective", "information", "between", "opinion", "words", "and", "aspect", "term." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
690
[ "Zhang", "and", "Qian", "(2020)", "constructed", "a", "global", "lexical", "graph", "to", "capture", "the", "word", "co-occurrence", "relation", "and", "combined", "a", "global", "lexical", "graph", "and", "a", "syntactic", "graph." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
691
[ "(2020)", "build", "aspectfocused", "and", "inter-aspect", "graphs", "to", "learn", "aspectspecific", "sentiment", "features." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
692
[ "Liang", "et", "al." ]
[ 0, 0, 0 ]
693
[ "(Zhang", "et", "al.,", "2019;", "Sun", "et", "al.,", "2019)", "stacked", "a", "GCN", "layer", "to", "extract", "rich", "representations", "over", "dependency", "tree." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
694
[ "Syntactical", "information", "can", "establish", "relation", "connections", "between", "aspect", "and", "corresponding", "opinion", "words,", "GCN", "based", "on", "dependency", "tree", "have", "achieved", "impressive", "performance", "in", "ABSA." ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11 ]
695
[ "Another", "trend", "explicitly", "leverages", "dependency", "tree." ]
[ 0, 0, 0, 0, 0, 0 ]
696
[ "with", "both", "orthogonal", "regularization", "and", "sparse", "regularization." ]
[ 0, 0, 0, 0, 0, 0, 0 ]
697
[ "1https://github.com/zhangzheng1997/" ]
[ 0 ]
698
[ "(2018)", "employed", "a", "constrained", "attention", "network" ]
[ 0, 0, 0, 0, 0, 0 ]
699
[ "Hu", "et", "al." ]
[ 0, 0, 0 ]