[Doc] Improve spelling (#175)
Ziy1-Tan authored May 26, 2023
1 parent 68e77ab commit e19c553
Showing 8 changed files with 20 additions and 20 deletions.
2 changes: 1 addition & 1 deletion cpp/README.rst
@@ -106,7 +106,7 @@ After the building, you can install the GraphAr C++ library with:
Generate API document
^^^^^^^^^^^^^^^^^^^^^

- Building the API document with Doxgen:
+ Building the API document with Doxygen:

.. code-block:: shell
2 changes: 1 addition & 1 deletion cpp/test/test_arrow_chunk_writer.cc
@@ -34,7 +34,7 @@ limitations under the License.
#define CATCH_CONFIG_MAIN
#include <catch2/catch.hpp>

TEST_CASE("test_vertex_property_wrtier_from_file") {
TEST_CASE("test_vertex_property_writer_from_file") {
std::string root;
REQUIRE(GetTestResourceRoot(&root).ok());

2 changes: 1 addition & 1 deletion docs/user-guide/getting-started.rst
@@ -31,7 +31,7 @@ The file `person.vertex.yml`_ located inside the test data contains an example o

Edge information
````````````````
- Each edge information file defines a single type of edges with specific labels for the source vertex, destination vertex and the edge, e.g., "person_konws_person" in this case. It defines the meta information such as the edge chunk size, the source vertex chunk size, the destination vertex chunk size, if the edges are directed or not, the relative file path for edge data files, the adjLists and the version of GraphAr. The file `person_knows_person.edge.yml`_ located inside the test data contains an example of the edge information file.
+ Each edge information file defines a single type of edges with specific labels for the source vertex, destination vertex and the edge, e.g., "person_knows_person" in this case. It defines the meta information such as the edge chunk size, the source vertex chunk size, the destination vertex chunk size, if the edges are directed or not, the relative file path for edge data files, the adjLists and the version of GraphAr. The file `person_knows_person.edge.yml`_ located inside the test data contains an example of the edge information file.

In GAR format, separate data files are used to store the structure (called adjList) and the properties for edges. The adjList type can be either of **unordered_by_source**, **unordered_by_dest**, **ordered_by_source** or **ordered_by_dest** (see `Edges in GraphAr <file-format.html#edges-in-graphar>`_ for more). For a specific type of adjList, the meta information includes its file path prefix, the file type, as well as all the property groups attached.

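The vertex and edge chunks that these YAML descriptors point to are ordinary columnar files (Parquet or ORC in the test data), so they can also be loaded directly as Spark DataFrames through the project's data source. A minimal sketch, assuming a local session and a hypothetical chunk path; the format string and the "fileFormat" option are taken from the TestReader changes further down:

    import org.apache.spark.sql.SparkSession

    object ReadVertexChunksSketch {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .appName("gar-read-sketch")
          .master("local[*]")
          .getOrCreate()

        // Path to one vertex property-group directory, e.g. <prefix>/vertex/person/id.
        // Hypothetical location; point it at wherever the chunk files actually live.
        val chunkPath = "/path/to/ldbc_sample/parquet/vertex/person/id"

        val df = spark.read
          .option("fileFormat", "parquet") // payload format of the chunk files
          .format("com.alibaba.graphar.datasources.GarDataSource")
          .load(chunkPath)

        df.show(5)
        spark.stop()
      }
    }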
8 changes: 4 additions & 4 deletions spark/src/main/scala/com/alibaba/graphar/EdgeInfo.scala
@@ -106,12 +106,12 @@ class EdgeInfo() {
throw new IllegalArgumentException
}

- /** Check if the edge info contains the property group in cerain adj list structure.
+ /** Check if the edge info contains the property group in certain adj list structure.
*
* @param property_group the property group to check.
* @param adj_list_type the type of adj list structure.
- * @return true if the edge info contains the property group in cerain adj list structure.
- * If edge info not support the given adj list type or not contains the proerpty group in the adj list structure,
+ * @return true if the edge info contains the property group in certain adj list structure.
+ * If edge info not support the given adj list type or not contains the property group in the adj list structure,
* return false.
*/
def containPropertyGroup(property_group: PropertyGroup, adj_list_type: AdjListType.Value): Boolean = {
@@ -388,7 +388,7 @@ class EdgeInfo() {
/** Get the path prefix of the adjacency list topology chunk for the given
* adjacency list type.
* @param adj_list_type type of adj list structure.
- * @return path prfix of of the adjacency list topology.
+ * @return path prefix of of the adjacency list topology.
*/
def getAdjListPathPrefix(adj_list_type: AdjListType.Value) : String = {
return prefix + getAdjListPrefix(adj_list_type) + "adj_list/"
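A hedged usage sketch of the two EdgeInfo members whose comments are fixed above. The method names and signatures come from this file; how the edgeInfo and propertyGroup values are obtained (normally by deserializing an .edge.yml file) is assumed:

    import com.alibaba.graphar.{AdjListType, EdgeInfo, PropertyGroup}

    object EdgeInfoSketch {
      // Prints where the adjacency-list topology chunks live if the given
      // property group is stored under the given adj list type.
      def describeAdjList(edgeInfo: EdgeInfo,
                          propertyGroup: PropertyGroup,
                          adjListType: AdjListType.Value): Unit = {
        if (edgeInfo.containPropertyGroup(propertyGroup, adjListType)) {
          // e.g. "<prefix><adj_list_prefix>adj_list/" per getAdjListPathPrefix above
          println(s"topology chunks under: ${edgeInfo.getAdjListPathPrefix(adjListType)}")
        } else {
          println("property group is not stored under this adj list type")
        }
      }
    }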
2 changes: 1 addition & 1 deletion spark/src/main/scala/com/alibaba/graphar/VertexInfo.scala
@@ -89,7 +89,7 @@ class VertexInfo() {
/** Get the data type of property.
*
* @param property_name name of the property.
- * @return the data type in gar of the proeprty. If the vertex info does not contains the property,
+ * @return the data type in gar of the property. If the vertex info does not contains the property,
* raise IllegalArgumentException error.
*/
def getPropertyType(property_name: String): GarType.Value = {
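As the comment above notes, getPropertyType raises IllegalArgumentException for an unknown property. A small sketch, assuming only the class and method shown in this diff, that wraps the call in an Option:

    import com.alibaba.graphar.{GarType, VertexInfo}
    import scala.util.Try

    object VertexInfoSketch {
      // Returns Some(garType) for a known property, or None when getPropertyType
      // throws IllegalArgumentException for a property the vertex info lacks.
      def propertyTypeOption(vertexInfo: VertexInfo, propertyName: String): Option[GarType.Value] =
        Try(vertexInfo.getPropertyType(propertyName)).toOption
    }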
@@ -27,7 +27,7 @@ import org.apache.spark.sql.functions._
/** Reader for edge chunks.
*
* @constructor create a new edge reader with edge info and AdjList type.
- * @param prefix the absolute perfix.
+ * @param prefix the absolute prefix.
* @param edgeInfo the edge info that describes the edge type.
* @param adjListType the adj list type for the edge.
* @param spark spark session for the reader to read chunks as Spark DataFrame.
@@ -205,7 +205,7 @@ class EdgeReader(prefix: String, edgeInfo: EdgeInfo, adjListType: AdjListType.V
}
}

- /** Load the chunks for mutiple property groups of a vertex chunk as a DataFrame.
+ /** Load the chunks for multiple property groups of a vertex chunk as a DataFrame.
*
* @param propertyGroups list of property groups.
* @param vertex_chunk_index index of vertex chunk.
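A hedged sketch tying the reader pieces together: the EdgeReader constructor parameters and readMultipleEdgePropertyGroups come from this diff, while the reader's package path, the ordered_by_source value name, and the way the edge info and property groups are obtained are assumptions:

    import org.apache.spark.sql.{DataFrame, SparkSession}
    import com.alibaba.graphar.{AdjListType, EdgeInfo, PropertyGroup}
    import com.alibaba.graphar.reader.EdgeReader // package of EdgeReader assumed

    object EdgeReaderSketch {
      // Reads every chunk of the listed property groups into one DataFrame,
      // mirroring readMultipleEdgePropertyGroups(property_groups, false) in TestReader below.
      def readEdgeProperties(prefix: String,
                             edgeInfo: EdgeInfo,
                             propertyGroups: java.util.ArrayList[PropertyGroup],
                             spark: SparkSession): DataFrame = {
        // adj list value name assumed to mirror "ordered_by_source" from the user guide
        val reader = new EdgeReader(prefix, edgeInfo, AdjListType.ordered_by_source, spark)
        reader.readMultipleEdgePropertyGroups(propertyGroups, false)
      }
    }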
16 changes: 8 additions & 8 deletions spark/src/test/scala/com/alibaba/graphar/TestReader.scala
@@ -35,8 +35,8 @@ class ReaderSuite extends AnyFunSuite {
// read vertex chunk files in Parquet
val parquet_file_path = "gar-test/ldbc_sample/parquet/"
val parquet_prefix = getClass.getClassLoader.getResource(parquet_file_path).getPath
- val parqeut_read_path = parquet_prefix + "vertex/person/id"
- val df1 = spark.read.option("fileFormat", "parquet").format("com.alibaba.graphar.datasources.GarDataSource").load(parqeut_read_path)
+ val parquet_read_path = parquet_prefix + "vertex/person/id"
+ val df1 = spark.read.option("fileFormat", "parquet").format("com.alibaba.graphar.datasources.GarDataSource").load(parquet_read_path)
// validate reading results
assert(df1.rdd.getNumPartitions == 10)
assert(df1.count() == 903)
@@ -160,12 +160,12 @@ class ReaderSuite extends AnyFunSuite {
// test reading multiple property groups
var property_groups = new java.util.ArrayList[PropertyGroup]()
property_groups.add(property_group)
- val mutiple_property_df_chunk_2 = reader.readMultipleEdgePropertyGroupsForVertexChunk(property_groups, 2, false)
- assert(mutiple_property_df_chunk_2.columns.size == 1)
- assert(mutiple_property_df_chunk_2.count() == 1077)
- val mutiple_property_df = reader.readMultipleEdgePropertyGroups(property_groups, false)
- assert(mutiple_property_df.columns.size == 1)
- assert(mutiple_property_df.count() == 6626)
+ val multiple_property_df_chunk_2 = reader.readMultipleEdgePropertyGroupsForVertexChunk(property_groups, 2, false)
+ assert(multiple_property_df_chunk_2.columns.size == 1)
+ assert(multiple_property_df_chunk_2.count() == 1077)
+ val multiple_property_df = reader.readMultipleEdgePropertyGroups(property_groups, false)
+ assert(multiple_property_df.columns.size == 1)
+ assert(multiple_property_df.count() == 6626)

// test reading all property groups
val all_property_df_chunk_2 = reader.readAllEdgePropertyGroupsForVertexChunk(2, false)
@@ -33,7 +33,7 @@ class TransformExampleSuite extends AnyFunSuite {
.master("local[*]")
.getOrCreate()

test("tranform file type") {
test("transform file type") {
// read from orc files
val file_path = "gar-test/ldbc_sample/orc/"
val prefix = getClass.getClassLoader.getResource(file_path).getPath
@@ -63,7 +63,7 @@ class TransformExampleSuite extends AnyFunSuite {
fs.close()
}

test("tranform adjList type") {
test("transform adjList type") {
val file_path = "gar-test/ldbc_sample/parquet/"
val prefix = getClass.getClassLoader.getResource(file_path).getPath
// get vertex num
