Skip to content

Commit 1c2744a

Browse files
committed
Merge remote-tracking branch 'upstream/main' into insight_datatype_support
Committed-by: bingqing.lbq from Dev container
2 parents 914b66c + 1fc617f commit 1c2744a

File tree

283 files changed

+8601
-3451
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

283 files changed

+8601
-3451
lines changed

.github/workflows/build-graphscope-images-linux.yml

Lines changed: 19 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ jobs:
6161
df -h
6262
make learning CI=false VERSION=${SHORT_SHA}
6363
df -h
64-
make graphlearn-torch CI=false VERSION=${SHORT_SHA}
64+
# make graphlearn-torch CI=false VERSION=${SHORT_SHA}
6565
df -h
6666
6767
- name: Release Nightly Image
@@ -86,7 +86,7 @@ jobs:
8686
sudo docker tag graphscope/interactive-frontend:${SHORT_SHA} ${{ env.REGISTRY }}/graphscope/interactive-frontend:${tag}
8787
sudo docker tag graphscope/interactive-executor:${SHORT_SHA} ${{ env.REGISTRY }}/graphscope/interactive-executor:${tag}
8888
sudo docker tag graphscope/learning:${SHORT_SHA} ${{ env.REGISTRY }}/graphscope/learning:${tag}
89-
sudo docker tag graphscope/graphlearn-torch:${SHORT_SHA} ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${tag}
89+
# sudo docker tag graphscope/graphlearn-torch:${SHORT_SHA} ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${tag}
9090
9191
sudo docker push ${{ env.REGISTRY }}/graphscope/coordinator:${tag}
9292
sudo docker push ${{ env.REGISTRY }}/graphscope/analytical:${tag}
@@ -96,7 +96,7 @@ jobs:
9696
sudo docker push ${{ env.REGISTRY }}/graphscope/interactive-frontend:${tag}
9797
sudo docker push ${{ env.REGISTRY }}/graphscope/interactive-executor:${tag}
9898
sudo docker push ${{ env.REGISTRY }}/graphscope/learning:${tag}
99-
sudo docker push ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${tag}
99+
# sudo docker push ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${tag}
100100
101101
# dataset image
102102
# Note! dataset image are built manually just use the latest one.
@@ -128,7 +128,7 @@ jobs:
128128
sudo docker tag graphscope/interactive-frontend:${SHORT_SHA} ${{ env.REGISTRY }}/graphscope/interactive-frontend:${tag}
129129
sudo docker tag graphscope/interactive-executor:${SHORT_SHA} ${{ env.REGISTRY }}/graphscope/interactive-executor:${tag}
130130
sudo docker tag graphscope/learning:${SHORT_SHA} ${{ env.REGISTRY }}/graphscope/learning:${tag}
131-
sudo docker tag graphscope/graphlearn-torch:${SHORT_SHA} ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${tag}
131+
# sudo docker tag graphscope/graphlearn-torch:${SHORT_SHA} ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${tag}
132132
133133
sudo docker push ${{ env.REGISTRY }}/graphscope/coordinator:${tag}
134134
sudo docker push ${{ env.REGISTRY }}/graphscope/analytical:${tag}
@@ -137,7 +137,7 @@ jobs:
137137
sudo docker push ${{ env.REGISTRY }}/graphscope/interactive-frontend:${tag}
138138
sudo docker push ${{ env.REGISTRY }}/graphscope/interactive-executor:${tag}
139139
sudo docker push ${{ env.REGISTRY }}/graphscope/learning:${tag}
140-
sudo docker push ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${tag}
140+
# sudo docker push ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${tag}
141141
142142
# dataset image
143143
# Note! dataset image are built manually just use the latest one.
@@ -154,7 +154,7 @@ jobs:
154154
sudo docker tag graphscope/interactive-frontend:${SHORT_SHA} ${{ env.REGISTRY }}/graphscope/interactive-frontend:${tag}
155155
sudo docker tag graphscope/interactive-executor:${SHORT_SHA} ${{ env.REGISTRY }}/graphscope/interactive-executor:${tag}
156156
sudo docker tag graphscope/learning:${SHORT_SHA} ${{ env.REGISTRY }}/graphscope/learning:${tag}
157-
sudo docker tag graphscope/graphlearn-torch:${SHORT_SHA} ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${tag}
157+
# sudo docker tag graphscope/graphlearn-torch:${SHORT_SHA} ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${tag}
158158
159159
sudo docker push ${{ env.REGISTRY }}/graphscope/coordinator:${tag}
160160
sudo docker push ${{ env.REGISTRY }}/graphscope/analytical:${tag}
@@ -163,7 +163,7 @@ jobs:
163163
sudo docker push ${{ env.REGISTRY }}/graphscope/interactive-frontend:${tag}
164164
sudo docker push ${{ env.REGISTRY }}/graphscope/interactive-executor:${tag}
165165
sudo docker push ${{ env.REGISTRY }}/graphscope/learning:${tag}
166-
sudo docker push ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${tag}
166+
# sudo docker push ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${tag}
167167
168168
169169
build-image-aarch64:
@@ -385,9 +385,9 @@ jobs:
385385
${{ env.REGISTRY }}/graphscope/learning:${version}a${time}-x86_64 \
386386
${{ env.REGISTRY }}/graphscope/learning:${version}a${time}-aarch64
387387
388-
sudo docker manifest create \
389-
${{ env.REGISTRY }}/graphscope/graphlearn-torch:${version}a${time} \
390-
${{ env.REGISTRY }}/graphscope/graphlearn-torch:${version}a${time}-x86_64
388+
# sudo docker manifest create \
389+
# ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${version}a${time} \
390+
# ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${version}a${time}-x86_64
391391
392392
sudo docker manifest create \
393393
${{ env.REGISTRY }}/graphscope/dataset:${version}a${time} \
@@ -399,7 +399,7 @@ jobs:
399399
sudo docker manifest push ${{ env.REGISTRY }}/graphscope/interactive-frontend:${version}a${time}
400400
sudo docker manifest push ${{ env.REGISTRY }}/graphscope/interactive-executor:${version}a${time}
401401
sudo docker manifest push ${{ env.REGISTRY }}/graphscope/learning:${version}a${time}
402-
sudo docker manifest push ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${version}a${time}
402+
# sudo docker manifest push ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${version}a${time}
403403
sudo docker manifest push ${{ env.REGISTRY }}/graphscope/dataset:${version}a${time}
404404
405405
- name: Extract Tag Name
@@ -438,9 +438,9 @@ jobs:
438438
${{ env.REGISTRY }}/graphscope/learning:${{ steps.tag.outputs.TAG }}-x86_64 \
439439
${{ env.REGISTRY }}/graphscope/learning:${{ steps.tag.outputs.TAG }}-aarch64
440440
441-
sudo docker manifest create \
442-
${{ env.REGISTRY }}/graphscope/graphlearn-torch:${{ steps.tag.outputs.TAG }} \
443-
${{ env.REGISTRY }}/graphscope/graphlearn-torch:${{ steps.tag.outputs.TAG }}-x86_64
441+
# sudo docker manifest create \
442+
# ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${{ steps.tag.outputs.TAG }} \
443+
# ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${{ steps.tag.outputs.TAG }}-x86_64
444444
445445
sudo docker manifest create \
446446
${{ env.REGISTRY }}/graphscope/dataset:${{ steps.tag.outputs.TAG }} \
@@ -471,23 +471,23 @@ jobs:
471471
${{ env.REGISTRY }}/graphscope/learning:latest-x86_64 \
472472
${{ env.REGISTRY }}/graphscope/learning:latest-aarch64
473473
474-
sudo docker manifest create \
475-
${{ env.REGISTRY }}/graphscope/graphlearn-torch:latest \
476-
${{ env.REGISTRY }}/graphscope/graphlearn-torch:latest-x86_64
474+
# sudo docker manifest create \
475+
# ${{ env.REGISTRY }}/graphscope/graphlearn-torch:latest \
476+
# ${{ env.REGISTRY }}/graphscope/graphlearn-torch:latest-x86_64
477477
478478
# push
479479
sudo docker manifest push ${{ env.REGISTRY }}/graphscope/coordinator:${{ steps.tag.outputs.TAG }}
480480
sudo docker manifest push ${{ env.REGISTRY }}/graphscope/analytical:${{ steps.tag.outputs.TAG }}
481481
sudo docker manifest push ${{ env.REGISTRY }}/graphscope/interactive-frontend:${{ steps.tag.outputs.TAG }}
482482
sudo docker manifest push ${{ env.REGISTRY }}/graphscope/interactive-executor:${{ steps.tag.outputs.TAG }}
483483
sudo docker manifest push ${{ env.REGISTRY }}/graphscope/learning:${{ steps.tag.outputs.TAG }}
484-
sudo docker manifest push ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${{ steps.tag.outputs.TAG }}
484+
# sudo docker manifest push ${{ env.REGISTRY }}/graphscope/graphlearn-torch:${{ steps.tag.outputs.TAG }}
485485
sudo docker manifest push ${{ env.REGISTRY }}/graphscope/dataset:${{ steps.tag.outputs.TAG }}
486486
487487
sudo docker manifest push ${{ env.REGISTRY }}/graphscope/coordinator:latest
488488
sudo docker manifest push ${{ env.REGISTRY }}/graphscope/analytical:latest
489489
sudo docker manifest push ${{ env.REGISTRY }}/graphscope/interactive-frontend:latest
490490
sudo docker manifest push ${{ env.REGISTRY }}/graphscope/interactive-executor:latest
491491
sudo docker manifest push ${{ env.REGISTRY }}/graphscope/learning:latest
492-
sudo docker manifest push ${{ env.REGISTRY }}/graphscope/graphlearn-torch:latest
492+
# sudo docker manifest push ${{ env.REGISTRY }}/graphscope/graphlearn-torch:latest
493493

.github/workflows/interactive.yml

Lines changed: 7 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -141,8 +141,8 @@ jobs:
141141
# download dataset
142142
git clone -b master --single-branch --depth=1 https://github.com/GraphScope/gstest.git ${GS_TEST_DIR}
143143
mkdir -p ${INTERACTIVE_WORKSPACE}/data/ldbc
144-
GRAPH_SCHEMA_YAML=${GS_TEST_DIR}/flex/ldbc-sf01-long-date/audit_graph_schema.yaml
145-
BUILD_LOAD_FILE=${GS_TEST_DIR}/flex/ldbc-sf01-long-date/audit_bulk_load.yaml
144+
GRAPH_SCHEMA_YAML=${GS_TEST_DIR}/flex/ldbc-sf01-long-date/audit_graph_schema_creationDate.yaml
145+
BUILD_LOAD_FILE=${GS_TEST_DIR}/flex/ldbc-sf01-long-date/audit_bulk_load_creationDate.yaml
146146
cp ${GRAPH_SCHEMA_YAML} ${INTERACTIVE_WORKSPACE}/data/ldbc/graph.yaml
147147
cp ${BUILD_LOAD_FILE} ${INTERACTIVE_WORKSPACE}/data/ldbc/import.yaml
148148
mkdir -p ${INTERACTIVE_WORKSPACE}/data/movies
@@ -295,14 +295,12 @@ jobs:
295295
# plus_one: (num: int64) -> (num: int64), CppEncoder
296296
# sample_app: (num: int64) -> (num: int64), kCypherJson
297297

298-
sed -i 's/default_graph: ldbc/default_graph: modern_graph/g' ${GITHUB_WORKSPACE}/flex/tests/hqps/interactive_config_test.yaml
299298
sed -i 's/interactive_workspace/temp_workspace/g' ${GITHUB_WORKSPACE}/flex/tests/hqps/interactive_config_test.yaml
300299
cd ${GITHUB_WORKSPACE}/flex/tests/interactive/
301300
bash test_plugin_loading.sh ${TMP_INTERACTIVE_WORKSPACE} modern_graph \
302301
${GITHUB_WORKSPACE}/flex/tests/hqps/interactive_config_test.yaml \
303302
./modern_graph_schema_v0_0.yaml ./modern_graph_schema_v0_1.yaml
304303
sed -i 's/temp_workspace/interactive_workspace/g' ${GITHUB_WORKSPACE}/flex/tests/hqps/interactive_config_test.yaml
305-
sed -i 's/default_graph: modern_graph/default_graph: movies/g' ${GITHUB_WORKSPACE}/flex/tests/hqps/interactive_config_test.yaml
306304

307305
- name: Let compiler use latest interactive java sdk
308306
env:
@@ -346,10 +344,8 @@ jobs:
346344
run: |
347345
cd ${GITHUB_WORKSPACE}/flex/tests/hqps/
348346
export ENGINE_TYPE=hiactor
349-
# change the default_graph config in ./interactive_config_test.yaml to ldbc
350-
sed -i 's/default_graph: movies/default_graph: ldbc/g' ./interactive_config_test.yaml
351-
bash hqps_adhoc_test.sh ${INTERACTIVE_WORKSPACE} ldbc \
352-
${GITHUB_WORKSPACE}/flex/tests/hqps/interactive_config_test.yaml
347+
bash hqps_adhoc_test.sh ${INTERACTIVE_WORKSPACE} ldbc CBO
348+
bash hqps_adhoc_test.sh ${INTERACTIVE_WORKSPACE} ldbc RBO
353349
354350
- name: Run End-to-End cypher adhoc movie query test
355351
env:
@@ -359,10 +355,7 @@ jobs:
359355
run: |
360356
cd ${GITHUB_WORKSPACE}/flex/tests/hqps/
361357
export ENGINE_TYPE=hiactor
362-
# change the default_graph config in ./interactive_config_test.yaml to movies
363-
sed -i 's/default_graph: ldbc/default_graph: movies/g' ./interactive_config_test.yaml
364-
bash hqps_adhoc_test.sh ${INTERACTIVE_WORKSPACE} movies \
365-
${GITHUB_WORKSPACE}/flex/tests/hqps/interactive_config_test.yaml
358+
bash hqps_adhoc_test.sh ${INTERACTIVE_WORKSPACE} movies RBO
366359
367360
- name: Run End-to-End cypher adhoc graph_algo query test
368361
env:
@@ -372,10 +365,7 @@ jobs:
372365
run: |
373366
cd ${GITHUB_WORKSPACE}/flex/tests/hqps/
374367
export ENGINE_TYPE=hiactor
375-
# change the default_graph config in ${GS_TEST_DIR}/flex/ldbc-sf01-long-date/interactive_config.yaml to graph_algo
376-
sed -i 's/default_graph: movies/default_graph: graph_algo/g' ./interactive_config_test.yaml
377-
bash hqps_adhoc_test.sh ${INTERACTIVE_WORKSPACE} graph_algo \
378-
${GITHUB_WORKSPACE}/flex/tests/hqps/interactive_config_test.yaml
368+
bash hqps_adhoc_test.sh ${INTERACTIVE_WORKSPACE} graph_algo RBO
379369
380370
- name: Run Gremlin test on modern graph
381371
env:
@@ -385,9 +375,7 @@ jobs:
385375
run: |
386376
cd ${GITHUB_WORKSPACE}/flex/tests/hqps/
387377
export ENGINE_TYPE=hiactor
388-
sed -i 's/default_graph: graph_algo/default_graph: modern_graph/g' ./interactive_config_test.yaml
389-
bash hqps_adhoc_test.sh ${INTERACTIVE_WORKSPACE} modern_graph \
390-
${GITHUB_WORKSPACE}/flex/tests/hqps/interactive_config_test.yaml gremlin
378+
bash hqps_adhoc_test.sh ${INTERACTIVE_WORKSPACE} modern_graph RBO gremlin
391379
392380
test-build-flex:
393381
runs-on: ubuntu-22.04

.gitmodules

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,3 +13,6 @@
1313
[submodule "flex/third_party/parallel-hashmap"]
1414
path = flex/third_party/parallel-hashmap
1515
url = https://github.com/greg7mdp/parallel-hashmap.git
16+
[submodule "flex/third_party/aliyun-oss-cpp-sdk"]
17+
path = flex/third_party/aliyun-oss-cpp-sdk
18+
url = https://github.com/aliyun/aliyun-oss-cpp-sdk.git

analytical_engine/core/context/context_protocols.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ enum class ContextDataType {
3636
};
3737

3838
/* N.B. These values should be the same as vineyard::TypeToInt::value. Because
39-
* theses values are used to decode in Python side. Refer:
39+
* these values are used to decode in Python side. Refer:
4040
* python.graphscope.framework.utils._to_numpy_dtype
4141
*/
4242
inline int ContextDataTypeToInt(ContextDataType type) {

analytical_engine/core/error.h

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -85,10 +85,10 @@ inline std::string formatEnumValue(const vineyard::ErrorCode& value) {
8585

8686
#ifndef __FRAME_CURRENT_EXCEPTION_TYPENAME
8787
#if defined(__GLIBCXX__) || defined(__GLIBCPP__)
88-
#define __FRAME_CURRENT_EXCEPTION_TYPENAME(var) \
89-
do { \
90-
std::exception_ptr __p = std::current_exception(); \
91-
var = __p ? __p.__cxa_exception_type()->name() : "unknow type"; \
88+
#define __FRAME_CURRENT_EXCEPTION_TYPENAME(var) \
89+
do { \
90+
std::exception_ptr __p = std::current_exception(); \
91+
var = __p ? __p.__cxa_exception_type()->name() : "unknown type"; \
9292
} while (0)
9393
#else
9494
#define __FRAME_CURRENT_EXCEPTION_TYPENAME(var) \

analytical_engine/java/grape-giraph/src/main/java/com/alibaba/graphscope/context/GiraphComputationAdaptorContext.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -176,7 +176,7 @@ public void writeBackVertexData() {
176176
for (long lid = 0; lid < innerVerticesNum; ++lid) {
177177
// Write the output of toString().
178178
outputStream.writeBytes(vertexDataManager.getVertexData(lid).toString());
179-
long cur = outputStream.bytesWriten();
179+
long cur = outputStream.bytesWritten();
180180
offsets[(int) lid] = cur - previous;
181181
maxOffset = Math.max(offsets[(int) lid], maxOffset);
182182
previous = cur;

analytical_engine/java/grape-giraph/src/main/java/com/alibaba/graphscope/graph/impl/GiraphVertexIdManagerImpl.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -148,7 +148,7 @@ private FFIByteVectorInputStream generateVertexIdStream() {
148148
outputStream.finishSetting();
149149
logger.info(
150150
"Vertex id stream size: "
151-
+ outputStream.bytesWriten()
151+
+ outputStream.bytesWritten()
152152
+ ", vertices: "
153153
+ vertexNum);
154154
} catch (IOException e) {

analytical_engine/java/grape-giraph/src/main/java/com/alibaba/graphscope/graph/impl/VertexDataManagerImpl.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -156,7 +156,7 @@ private void readVertexDataFromIFragment(FFIByteVectorOutputStream outputStream)
156156
outputStream.finishSetting();
157157
logger.info(
158158
"Vertex data stream size: "
159-
+ outputStream.bytesWriten()
159+
+ outputStream.bytesWritten()
160160
+ ", vertices: "
161161
+ vertexNum);
162162
} catch (IOException e) {

analytical_engine/java/grape-giraph/src/main/java/com/alibaba/graphscope/loader/impl/GraphDataBufferManagerImpl.java

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -170,14 +170,14 @@ private void check() {
170170
@Override
171171
public synchronized void addVertex(int threadId, Writable id, Writable value)
172172
throws IOException {
173-
int bytes = (int) -vidOutputStream[threadId].bytesWriten();
173+
int bytes = (int) -vidOutputStream[threadId].bytesWritten();
174174
id.write(vidOutputStream[threadId]);
175-
bytes += vidOutputStream[threadId].bytesWriten();
175+
bytes += vidOutputStream[threadId].bytesWritten();
176176
idOffsetsArr[threadId].push_back(bytes);
177177

178-
int bytes2 = (int) -vdataOutputStream[threadId].bytesWriten();
178+
int bytes2 = (int) -vdataOutputStream[threadId].bytesWritten();
179179
value.write(vdataOutputStream[threadId]);
180-
bytes2 += vdataOutputStream[threadId].bytesWriten();
180+
bytes2 += vdataOutputStream[threadId].bytesWritten();
181181
vdataOffsetsArr[threadId].push_back(bytes2);
182182
}
183183

@@ -187,19 +187,19 @@ public synchronized void addEdges(int threadId, Writable id, Iterable<Edge> edge
187187
int bytesEdgeSrcOffset = 0, bytesEdgeDstOffset = 0, bytesDataOffsets = 0;
188188

189189
for (Edge edge : edges) {
190-
bytesEdgeSrcOffset = (int) -edgeSrcIdOutputStream[threadId].bytesWriten();
190+
bytesEdgeSrcOffset = (int) -edgeSrcIdOutputStream[threadId].bytesWritten();
191191
id.write(edgeSrcIdOutputStream[threadId]);
192-
bytesEdgeSrcOffset += edgeSrcIdOutputStream[threadId].bytesWriten();
192+
bytesEdgeSrcOffset += edgeSrcIdOutputStream[threadId].bytesWritten();
193193
edgeSrcIdOffsetArr[threadId].push_back(bytesEdgeSrcOffset);
194194

195-
bytesEdgeDstOffset = (int) -edgeDstOutputStream[threadId].bytesWriten();
195+
bytesEdgeDstOffset = (int) -edgeDstOutputStream[threadId].bytesWritten();
196196
edge.getTargetVertexId().write(edgeDstOutputStream[threadId]);
197-
bytesEdgeDstOffset += edgeDstOutputStream[threadId].bytesWriten();
197+
bytesEdgeDstOffset += edgeDstOutputStream[threadId].bytesWritten();
198198
edgeDstIdOffsetArr[threadId].push_back(bytesEdgeDstOffset);
199199

200-
bytesDataOffsets = (int) -edgeDataOutStream[threadId].bytesWriten();
200+
bytesDataOffsets = (int) -edgeDataOutStream[threadId].bytesWritten();
201201
edge.getValue().write(edgeDataOutStream[threadId]);
202-
bytesDataOffsets += edgeDataOutStream[threadId].bytesWriten();
202+
bytesDataOffsets += edgeDataOutStream[threadId].bytesWritten();
203203
edgeDataOffsetsArr[threadId].push_back(bytesDataOffsets);
204204
}
205205
}
@@ -210,19 +210,19 @@ public void addEdge(
210210
throws IOException {
211211
int bytesEdgeSrcOffset = 0, bytesEdgeDstOffset = 0, bytesDataOffsets = 0;
212212

213-
bytesEdgeSrcOffset = (int) -edgeSrcIdOutputStream[threadId].bytesWriten();
213+
bytesEdgeSrcOffset = (int) -edgeSrcIdOutputStream[threadId].bytesWritten();
214214
srcId.write(edgeSrcIdOutputStream[threadId]);
215-
bytesEdgeSrcOffset += edgeSrcIdOutputStream[threadId].bytesWriten();
215+
bytesEdgeSrcOffset += edgeSrcIdOutputStream[threadId].bytesWritten();
216216
edgeSrcIdOffsetArr[threadId].push_back(bytesEdgeSrcOffset);
217217

218-
bytesEdgeDstOffset = (int) -edgeDstOutputStream[threadId].bytesWriten();
218+
bytesEdgeDstOffset = (int) -edgeDstOutputStream[threadId].bytesWritten();
219219
dstId.write(edgeDstOutputStream[threadId]);
220-
bytesEdgeDstOffset += edgeDstOutputStream[threadId].bytesWriten();
220+
bytesEdgeDstOffset += edgeDstOutputStream[threadId].bytesWritten();
221221
edgeDstIdOffsetArr[threadId].push_back(bytesEdgeDstOffset);
222222

223-
bytesDataOffsets = (int) -edgeDataOutStream[threadId].bytesWriten();
223+
bytesDataOffsets = (int) -edgeDataOutStream[threadId].bytesWritten();
224224
value.write(edgeDataOutStream[threadId]);
225-
bytesDataOffsets += edgeDataOutStream[threadId].bytesWriten();
225+
bytesDataOffsets += edgeDataOutStream[threadId].bytesWritten();
226226
edgeDataOffsetsArr[threadId].push_back(bytesDataOffsets);
227227

228228
// logger.debug("worker [{}] adding edge [{}]->[{}], value {}", workerId, srcId,

0 commit comments

Comments (0)