feat: topic list (#14)
Co-authored-by: Alexandre ABRIOUX <alexandre-abrioux@users.noreply.github.com>
benjlevesque and alexandre-abrioux authored Dec 13, 2023
1 parent a4910a1 · commit 19db1fc
Showing 7 changed files with 79 additions and 23 deletions.
README.md (1 addition, 1 deletion)
@@ -41,7 +41,7 @@ yarn codegen ./subgraph-private.yaml
```
yarn test
# or, run in Docker
-yarn test -d
+yarn test:docker
```

- Build the subgraph. Do this again if you modify the [indexer's code](./src/mapping.ts) or the [graphql schema](./schema.graphql),
schema.graphql (9 additions, 0 deletions)
@@ -6,6 +6,7 @@ type Transaction @entity {
"the keccak256 hash of the transaction's contents"
dataHash: String!
channelId: String!
+  channel: Channel!
data: String

encryptedData: String
@@ -20,3 +21,11 @@ type Transaction @entity {
size: String!
topics: [String!]
}
+
+type Channel @entity {
+  "the channelId"
+  id: ID!
+  "combined topics of all transactions in the channel"
+  topics: [String!]!
+  transactions: [Transaction!]! @derivedFrom(field: "channel")
+}
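
The new `Channel` entity groups transactions by channel and exposes the union of their topics; its `transactions` field is resolved at query time through `@derivedFrom`, so nothing extra is stored on the channel itself. A minimal consumer-side sketch in TypeScript, assuming a deployed subgraph (the endpoint URL is hypothetical; `channel`/`transactions` follow The Graph's standard generated query fields):

```
// Hypothetical subgraph endpoint; substitute your own deployment URL.
const ENDPOINT = "https://api.example.com/subgraphs/name/storage";

const QUERY = `{
  channel(id: "channel1") {
    topics
    transactions { hash channelId topics }
  }
}`;

async function fetchChannel(): Promise<void> {
  const res = await fetch(ENDPOINT, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ query: QUERY }),
  });
  const { data } = await res.json();
  // `transactions` is computed from Transaction.channel via @derivedFrom;
  // only `id` and `topics` are persisted on the Channel entity.
  console.log(data.channel.topics, data.channel.transactions.length);
}

fetchChannel().catch(console.error);
```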
src/mapping.ts (22 additions, 7 deletions)
@@ -1,6 +1,6 @@
import { Bytes, ipfs, json, JSONValueKind, log } from "@graphprotocol/graph-ts";
import { NewHash } from "../generated/Contract/Contract";
-import { Transaction } from "../generated/schema";
+import { Channel, Transaction } from "../generated/schema";
import { computeHash, serializeTransaction } from "./hash-utils";
/**
* Handle a NewHash event
@@ -43,10 +43,9 @@ export function handleNewHash(event: NewHash): void {

let header = doc.get("header")!.toObject();
if (!header.isSet("channelIds") || !header.isSet("topics")) {
-    log.warning(
-      "IPFS document {} has a no header.channelIds or header.topics",
-      [ipfsHash],
-    );
+    log.warning("IPFS document {} has no header.channelIds or header.topics", [
+      ipfsHash,
+    ]);
return;
}
let channelIds = header.get("channelIds")!.toObject().entries;
@@ -57,16 +56,31 @@ export function handleNewHash(event: NewHash): void {
let channelId = channelIds[txIndex].key;
let index = channelIds[txIndex].value.toArray()[0].toBigInt().toI32();
log.info("parsing channelId {} for ipfsId {}", [channelId, ipfsHash]);
-    let entity = new Transaction(ipfsHash + "-" + index.toString());
+    let entityId = ipfsHash + "-" + index.toString();
+    let entity = new Transaction(entityId);
+    let channel = Channel.load(channelId);
+    if (!channel) {
+      log.debug("new channel {}", [channelId]);
+      channel = new Channel(channelId);
+      channel.topics = [];
+    }
let transaction = transactions[index].toObject();
+    entity.channel = channelId;

let topicList: string[] = [];
+    let channelTopicList = channel.topics;
if (topics.isSet(channelId)) {
let topicsJsonVal = topics.get(channelId)!.toArray();
for (let i = 0; i < topicsJsonVal.length; ++i) {
-        topicList.push(topicsJsonVal[i].toString());
+        let topic = topicsJsonVal[i].toString();
+        topicList.push(topic);
+        if (!channel.topics.includes(topic)) {
+          channelTopicList.push(topic);
+        }
}
}
+    channelTopicList.sort();
+    channel.topics = channelTopicList;

entity.hash = ipfsHash;
entity.channelId = channelId;
@@ -114,5 +128,6 @@ export function handleNewHash(event: NewHash): void {
}
entity.dataHash = computeHash(serializeTransaction(entity));
entity.save();
+    channel.save();
}
}
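
Taken together, the new mapping logic accumulates a channel-wide topic list across documents. A simplified plain-TypeScript sketch of the merge semantics (a standalone illustration, not the mapping code itself; note the mapping deduplicates against the channel's previously stored topics rather than the growing list):

```
// Simplified model of the channel topic merge performed in handleNewHash:
// append topics not already present, then sort the combined list.
function mergeChannelTopics(existing: string[], incoming: string[]): string[] {
  const merged = [...existing];
  for (const topic of incoming) {
    if (!merged.includes(topic)) {
      merged.push(topic); // dedupe, as the mapping does via includes()
    }
  }
  return merged.sort(); // sorted, matching channelTopicList.sort()
}

// First document declares ["topic1", "topic2"], the second ["topic1", "topic3"]:
// the channel ends up with ["topic1", "topic2", "topic3"], as the tests assert.
console.log(mergeChannelTopics(["topic1", "topic2"], ["topic1", "topic3"]));
```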
tests/ipfs/transaction-clear-2.json (16 additions, 0 deletions)
@@ -0,0 +1,16 @@
+{
+  "header": {
+    "channelIds": {
+      "channel1": [0]
+    },
+    "topics": {
+      "channel1": ["topic1", "topic3"]
+    },
+    "version": "0.1.0"
+  },
+  "transactions": [
+    {
+      "data": "subsequent transaction for channel1, with different topics"
+    }
+  ]
+}
tests/ipfs/transaction-clear.json (1 addition, 1 deletion)
@@ -4,7 +4,7 @@
"channel1": [0]
},
"topics": {
"channel1": ["topic1"]
"channel1": ["topic1", "topic2"]
},
"version": "0.1.0"
},
tests/transaction-data.test.ts (29 additions, 12 deletions)
@@ -1,37 +1,54 @@
import { afterEach, assert, clearStore, describe, test } from "matchstick-as";
import { processIpfs } from "./utils";

const hash = "testIpfsHash";
const entityId = hash + "-0";
const entityType = "Transaction";
describe("Transaction Data", () => {
afterEach(() => {
clearStore();
});

test("should process clear transactions", () => {
processIpfs("transaction-clear.json");
assert.entityCount("Transaction", 1);
assert.fieldEquals("Transaction", "testIpfsHash-0", "hash", "testIpfsHash");
processIpfs("transaction-clear.json", hash);
assert.entityCount(entityType, 1);
assert.fieldEquals(entityType, entityId, "hash", hash);
assert.fieldEquals(entityType, entityId, "channelId", "channel1");
assert.fieldEquals(entityType, entityId, "topics", "[topic1, topic2]");
assert.fieldEquals(
"Transaction",
"testIpfsHash-0",
entityType,
entityId,
"dataHash",
"0x09c043a7cb846a048c2e665b832add72046fd9913e05f0c20e7f4f360ce62a8d",
);
+
+    processIpfs("transaction-clear-2.json", "secondTx");
+    assert.entityCount(entityType, 2);
+    assert.fieldEquals(entityType, "secondTx-0", "channelId", "channel1");
+    assert.fieldEquals(entityType, "secondTx-0", "topics", "[topic1, topic3]");
+    assert.entityCount("Channel", 1);
+    assert.fieldEquals(
+      "Channel",
+      "channel1",
+      "topics",
+      "[topic1, topic2, topic3]",
+    );
});

test("should process encrypted transactions", () => {
processIpfs("transaction-encrypted.json");
assert.entityCount("Transaction", 1);
assert.fieldEquals("Transaction", "testIpfsHash-0", "hash", "testIpfsHash");
processIpfs("transaction-encrypted.json", hash);
assert.entityCount(entityType, 1);
assert.fieldEquals(entityType, entityId, "hash", hash);
assert.fieldEquals(
"Transaction",
"testIpfsHash-0",
entityType,
entityId,
"dataHash",
"0x3e4bc69070baf9ed30ef58b87b454fd19841f18dd83f7dbe8b311bb32befc483",
);
});

test("should ignore wrong transaction data", () => {
processIpfs("transaction-data-invalid-object.json");
assert.entityCount("Transaction", 0);
processIpfs("transaction-data-invalid-object.json", hash);
assert.entityCount(entityType, 0);
});
});
tests/utils.ts (1 addition, 2 deletions)
@@ -3,8 +3,7 @@ import { NewHash } from "../generated/Contract/Contract";
import { Address, Bytes, ethereum } from "@graphprotocol/graph-ts";
import { handleNewHash } from "../src/mapping";

-export const processIpfs = (fileName: string): void => {
-  const ipfsHash = "testIpfsHash";
+export const processIpfs = (fileName: string, ipfsHash: string): void => {
mockIpfsFile(ipfsHash, `tests/ipfs/${fileName}`);

// @ts-ignore
