diff --git a/.changeset/neat-jeans-repair.md b/.changeset/neat-jeans-repair.md
new file mode 100644
index 0000000..4868c18
--- /dev/null
+++ b/.changeset/neat-jeans-repair.md
@@ -0,0 +1,5 @@
+---
+"mocha-chai-rdf": patch
+---
+
+Added store functions to in-memory stream client
diff --git a/lib/sparql-clients.ts b/lib/sparql-clients.ts
index 85967b3..5e6a07b 100644
--- a/lib/sparql-clients.ts
+++ b/lib/sparql-clients.ts
@@ -1,5 +1,6 @@
+/* eslint-disable camelcase */
 import type * as Oxigraph from 'oxigraph'
-import type { Quad, Term } from '@rdfjs/types'
+import type { NamedNode, Quad, Quad_Graph, Term, DefaultGraph, Stream } from '@rdfjs/types'
 import rdf from '@zazuko/env-node'
 import toStream from 'into-stream'
 import type { ParsingClient } from 'sparql-http-client/ParsingClient.js'
@@ -51,7 +52,7 @@ export function parsingClient(store: Oxigraph.Store): ParsingClient {
       ask: ask.bind(null, store),
       update: update.bind(null, store),
     },
-    store: {} as unknown as ParsingClient['store'],
+    store: undefined,
   }
 }
 
@@ -69,6 +70,22 @@ export function streamClient(store: Oxigraph.Store): StreamClient {
       ask: ask.bind(null, store),
       update: update.bind(null, store),
     },
-    store: {} as unknown as StreamClient['store'],
+    store: {
+      get(graph: Quad_Graph) {
+        return rdf.dataset(store.match(null, null, null, graph)).toStream()
+      },
+      async post(stream: Stream, { graph: to_graph_name }: { graph?: NamedNode | DefaultGraph } = {}) {
+        const data = await rdf.dataset().import(stream)
+        store.load(data.toCanonical(), { to_graph_name, format: 'nt' })
+      },
+      async put(stream: Stream, options?: { graph?: NamedNode | DefaultGraph }) {
+        if (options?.graph?.termType === 'NamedNode') {
+          store.update(`CLEAR SILENT GRAPH <${options?.graph?.value}>`)
+        } else {
+          store.update('CLEAR DEFAULT')
+        }
+        await this.post(stream, options)
+      },
+    },
   }
 }
diff --git a/package-lock.json b/package-lock.json
index 5220186..7017b4a 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,15 +1,16 @@
 {
   "name": "mocha-chai-rdf",
-  "version": "0.1.4",
+  "version": "0.1.5",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "mocha-chai-rdf",
-      "version": "0.1.4",
+      "version": "0.1.5",
       "license": "MIT",
       "dependencies": {
         "@rdfjs/to-ntriples": "^3.0.1",
+        "@types/sparql-http-client": "^3.0.5",
         "@zazuko/env-node": "^2.1.3",
         "into-stream": "^8.0.1",
         "mocha-chai-jest-snapshot": "^1.1.6",
@@ -24,11 +25,10 @@
         "@types/glob": "^8.1.0",
         "@types/mocha": "^10.0.7",
         "@types/rdfjs__to-ntriples": "^3.0.0",
-        "@types/sparql-http-client": "^3.0.3",
         "@typescript-eslint/eslint-plugin": "^7",
         "@typescript-eslint/parser": "^7",
         "c8": "^10.1.2",
-        "chai": "^5",
+        "chai": "^5.1.2",
         "eslint": "^8",
         "eslint-import-resolver-typescript": "^3.6.3",
         "get-stream": "^9.0.1",
@@ -2366,10 +2366,9 @@
       "license": "MIT"
     },
     "node_modules/@types/sparql-http-client": {
-      "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/sparql-http-client/-/sparql-http-client-3.0.3.tgz",
-      "integrity": "sha512-/pbsdtF1VTRgjVGWqAKo8DXq/ULKJvhwnXnR1jfWSDFcvxC+MFxdkB4xOnwsJ6/6to4e3u0av0KY7EFlxW6c5Q==",
-      "dev": true,
+      "version": "3.0.5",
+      "resolved": "https://registry.npmjs.org/@types/sparql-http-client/-/sparql-http-client-3.0.5.tgz",
+      "integrity": "sha512-Af35Iop5zrOjObTo64Xx8IBs73Nf+dHWqnhulGWF9cTtF+BUUBz8pDiUyiq3wMrKqhiljSyrT6WYcRGb3DVAKQ==",
       "license": "MIT",
       "dependencies": {
         "@rdfjs/types": ">=1.0.0",
@@ -3372,9 +3371,9 @@
       "license": "Apache-2.0"
     },
     "node_modules/chai": {
-      "version": "5.1.1",
-      "resolved": "https://registry.npmjs.org/chai/-/chai-5.1.1.tgz",
-      "integrity": "sha512-pT1ZgP8rPNqUgieVaEY+ryQr6Q4HXNg8Ei9UnLUrjN4IA7dvQC5JB+/kxVcPNDHyBcc/26CXPkbNzq3qwrOEKA==",
+      "version": "5.1.2",
+      "resolved": "https://registry.npmjs.org/chai/-/chai-5.1.2.tgz",
+      "integrity": "sha512-aGtmf24DW6MLHHG5gCx4zaI3uBq3KRtxeVs0DjFH6Z0rDNbsvTxFASFvdj79pxjxZ8/5u3PIiN3IwEIQkiiuPw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
diff --git a/package.json b/package.json
index 161acbd..97fd5d2 100644
--- a/package.json
+++ b/package.json
@@ -22,6 +22,7 @@
   ],
   "dependencies": {
     "@rdfjs/to-ntriples": "^3.0.1",
+    "@types/sparql-http-client": "^3.0.5",
    "@zazuko/env-node": "^2.1.3",
     "into-stream": "^8.0.1",
     "mocha-chai-jest-snapshot": "^1.1.6",
@@ -36,11 +37,10 @@
     "@types/glob": "^8.1.0",
     "@types/mocha": "^10.0.7",
     "@types/rdfjs__to-ntriples": "^3.0.0",
-    "@types/sparql-http-client": "^3.0.3",
     "@typescript-eslint/eslint-plugin": "^7",
     "@typescript-eslint/parser": "^7",
     "c8": "^10.1.2",
-    "chai": "^5",
+    "chai": "^5.1.2",
     "eslint": "^8",
     "eslint-import-resolver-typescript": "^3.6.3",
     "get-stream": "^9.0.1",
diff --git a/test/__snapshots__/index.test.ts.snap b/test/__snapshots__/index.test.ts.snap
index dc492c0..af581ad 100644
--- a/test/__snapshots__/index.test.ts.snap
+++ b/test/__snapshots__/index.test.ts.snap
@@ -22,10 +22,15 @@ exports[`mocha-chai-rdf test suites 3`] = `
     ✔ sparql-clients.js parsingClient can be queried with CONSTRUCT: 0ms
     ✔ sparql-clients.js parsingClient can be queried with ASK: 0ms
     ✔ sparql-clients.js parsingClient can be updated: 0ms
-    ✔ sparql-clients.js sparqlClient can be queried with SELECT: 0ms
-    ✔ sparql-clients.js sparqlClient can be queried with CONSTRUCT: 0ms
-    ✔ sparql-clients.js sparqlClient can be queried with ASK: 0ms
-    ✔ sparql-clients.js sparqlClient can be updated: 0ms
+    ✔ sparql-clients.js streamClient query can be queried with SELECT: 0ms
+    ✔ sparql-clients.js streamClient query can be queried with CONSTRUCT: 0ms
+    ✔ sparql-clients.js streamClient query can be queried with ASK: 0ms
+    ✔ sparql-clients.js streamClient query can be updated: 0ms
+    ✔ sparql-clients.js streamClient store get can fetch a select graph: 0ms
+    ✔ sparql-clients.js streamClient store post can add to a named graph: 0ms
+    ✔ sparql-clients.js streamClient store post can add to default graph: 0ms
+    ✔ sparql-clients.js streamClient store put can replace named graph: 0ms
+    ✔ sparql-clients.js streamClient store put can replace default graph data: 0ms
     ✔ snapshots.js can be used to match canonical representation: 0ms
     ✔ snapshots.js preserves original functionality: 0ms
     ✔ matchers.js term can compare oxigraph with @rdfjs: 0ms
@@ -38,7 +43,7 @@ exports[`mocha-chai-rdf test suites 3`] = `
     ✔ matchers.js term when actual and expected objects are pointers succeeds when equal multi-pointers: 0ms
     4) matchers.js term when actual and expected objects are pointers fails when not equal multi-pointers
 
-  24 passing (0ms)
+  29 passing (0ms)
   4 failing
 
   1) store.js
diff --git a/test/tests/sparql-clients.ts b/test/tests/sparql-clients.ts
index 3d6ed07..b96fb7f 100644
--- a/test/tests/sparql-clients.ts
+++ b/test/tests/sparql-clients.ts
@@ -14,6 +14,8 @@ const require = module.createRequire(import.meta.url)
 
 use(matchers)
 
+const ex = rdf.namespace('http://example.com/')
+
 describe('sparql-clients.js', () => {
   describe('parsingClient', () => {
     let client: ParsingClient
@@ -90,11 +92,11 @@ describe('sparql-clients.js', () => {
     })
   })
 
-  describe('sparqlClient', () => {
+  describe('streamClient', () => {
     let client: StreamClient
     let store: Store
 
-    before(() => {
+    beforeEach(() => {
       store = new Store()
       store.load(fs.readFileSync(require.resolve('tbbt-ld/dist/tbbt.nq')).toString(), {
         format: 'application/n-quads',
@@ -102,70 +104,150 @@ describe('sparql-clients.js', () => {
       client = streamClient(store)
     })
 
-    it('can be queried with SELECT', async () => {
-      const stream = client.query.select(`
-        SELECT ?name WHERE {
-          <http://localhost:8080/data/person/amy-farrah-fowler> <http://schema.org/givenName> ?name
-        }
-      `)
+    context('query', () => {
+      it('can be queried with SELECT', async () => {
+        const stream = client.query.select(`
+          SELECT ?name WHERE {
+            <http://localhost:8080/data/person/amy-farrah-fowler> <http://schema.org/givenName> ?name
+          }
+        `)
 
-      const results = await getStreamAsArray(stream)
+        const results = await getStreamAsArray(stream)
 
-      expect(results).to.deep.equal([
-        { name: rdf.literal('Amy') },
-      ])
-    })
+        expect(results).to.deep.equal([
+          { name: rdf.literal('Amy') },
+        ])
+      })
 
-    it('can be queried with CONSTRUCT', async () => {
-      const stream = client.query.construct(`
-        CONSTRUCT WHERE {
-          <http://localhost:8080/data/person/amy-farrah-fowler> <http://schema.org/givenName> ?name
-        }
-      `)
+      it('can be queried with CONSTRUCT', async () => {
+        const stream = client.query.construct(`
+          CONSTRUCT WHERE {
+            <http://localhost:8080/data/person/amy-farrah-fowler> <http://schema.org/givenName> ?name
+          }
+        `)
 
-      const results = await getStreamAsArray(stream)
+        const results = await getStreamAsArray(stream)
 
-      expect([...results][0]).to.equal(rdf.quad(
-        rdf.namedNode('http://localhost:8080/data/person/amy-farrah-fowler'),
-        rdf.ns.schema.givenName,
-        rdf.literal('Amy'),
-      ))
-    })
+        expect([...results][0]).to.equal(rdf.quad(
+          rdf.namedNode('http://localhost:8080/data/person/amy-farrah-fowler'),
+          rdf.ns.schema.givenName,
+          rdf.literal('Amy'),
+        ))
+      })
 
-    it('can be queried with ASK', async () => {
-      const result = await client.query.ask(`
-        ASK {
-          ?person <http://schema.org/givenName> ?name
-        }
-      `)
+      it('can be queried with ASK', async () => {
+        const result = await client.query.ask(`
+          ASK {
+            ?person <http://schema.org/givenName> ?name
+          }
+        `)
 
-      expect(result).to.equal(true)
-    })
+        expect(result).to.equal(true)
+      })
 
-    it('can be updated', async () => {
-      await client.query.update(`
-        INSERT {
-          GRAPH ?g {
-            ?person <http://schema.org/name> ?newName
+      it('can be updated', async () => {
+        await client.query.update(`
+          INSERT {
+            GRAPH ?g {
+              ?person <http://schema.org/name> ?newName
+            }
           }
-        }
-        WHERE {
-          GRAPH ?g {
-            ?person <http://schema.org/givenName> ?name ;
-              <http://schema.org/familyName> ?familyName .
-            BIND(CONCAT(?name, " ", ?familyName) AS ?newName)
+          WHERE {
+            GRAPH ?g {
+              ?person <http://schema.org/givenName> ?name ;
+                <http://schema.org/familyName> ?familyName .
+              BIND(CONCAT(?name, " ", ?familyName) AS ?newName)
+            }
           }
-        }
-      `)
+        `)
+
+        expect(store.match(rdf.namedNode('http://localhost:8080/data/person/amy-farrah-fowler'), rdf.ns.schema.name)).to.deep.equal([
+          rdf.quad(
+            rdf.namedNode('http://localhost:8080/data/person/amy-farrah-fowler'),
+            rdf.ns.schema.name,
+            rdf.literal('Amy Fowler'),
+            rdf.namedNode('http://localhost:8080/data/person/amy-farrah-fowler'),
+          ),
+        ])
+      })
+    })
 
-      expect(store.match(rdf.namedNode('http://localhost:8080/data/person/amy-farrah-fowler'), rdf.ns.schema.name)).to.deep.equal([
-        rdf.quad(
-          rdf.namedNode('http://localhost:8080/data/person/amy-farrah-fowler'),
-          rdf.ns.schema.name,
-          rdf.literal('Amy Fowler'),
-          rdf.namedNode('http://localhost:8080/data/person/amy-farrah-fowler'),
-        ),
-      ])
+    context('store', () => {
+      context('get', () => {
+        it('can fetch a select graph', async () => {
+          // when
+          const stream = client.store.get(rdf.namedNode('http://localhost:8080/data/person/amy-farrah-fowler'))
+          const dataset = await rdf.dataset().import(stream)
+
+          // then
+          expect(dataset).to.have.property('size', 12)
+        })
+      })
+
+      context('post', () => {
+        it('can add to a named graph', async () => {
+          // given
+          const data = rdf.clownface()
+            .namedNode(ex.foo)
+            .addOut(rdf.ns.schema.name, 'Foo')
+
+          // when
+          await client.store.post(data.dataset.toStream(), {
+            graph: ex.Foo,
+          })
+
+          // then
+          expect(store.match(null, null, null, ex.Foo)).to.have.length(1)
+        })
+
+        it('can add to default graph', async () => {
+          // given
+          const data = rdf.clownface()
+            .namedNode(ex.foo)
+            .addOut(rdf.ns.schema.name, 'Foo')
+
+          // when
+          await client.store.post(data.dataset.toStream())
+
+          // then
+          expect(store.match(null, null, null, rdf.defaultGraph())).to.have.length(1)
+        })
+      })
+
+      context('put', () => {
+        it('can replace named graph', async () => {
+          // given
+          const graph = rdf.namedNode('http://localhost:8080/data/person/amy-farrah-fowler')
+          const data = rdf.clownface()
+            .namedNode(ex.foo)
+            .addOut(rdf.ns.schema.name, 'Foo')
+
+          // when
+          await client.store.put(data.dataset.toStream(), { graph })
+
+          // then
+          expect(store.match(null, null, null, graph)).to.have.length(1)
+        })
+
+        it('can replace default graph data', async () => {
+          // given
+          const foo = rdf.clownface()
+            .namedNode(ex.foo)
+            .addOut(rdf.ns.schema.name, 'Foo')
+          const bar = rdf.clownface()
+            .namedNode(ex.foo)
+            .addOut(rdf.ns.schema.name, 'Bar')
+
+          // when
+          await client.store.put(foo.dataset.toStream())
+          await client.store.put(bar.dataset.toStream())
+
+          // then
+          const matched = store.match(ex.foo, null, null, rdf.defaultGraph())
+          expect(matched).to.have.length(1)
+          expect(matched[0].object.value).to.have.eq('Bar')
+        })
+      })
     })
   })
 })
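
For reference, the new `store` property added in lib/sparql-clients.ts mirrors the SPARQL Graph Store protocol against the in-memory Oxigraph store: `get(graph)` streams the quads of one graph, `post(stream, { graph })` appends parsed quads to it, and `put(stream, { graph })` clears the target graph before re-posting. A minimal usage sketch, assuming the module is importable as `mocha-chai-rdf/sparql-clients.js` (the import path and the example IRIs are assumptions, not taken from this diff; the calls follow the tests above):

```ts
import { Store } from 'oxigraph'
import rdf from '@zazuko/env-node'
// assumed entry point; adjust to however the package exposes lib/sparql-clients.ts
import { streamClient } from 'mocha-chai-rdf/sparql-clients.js'

const store = new Store()
const client = streamClient(store)

// hypothetical graph and data, built the same way as in the tests
const graph = rdf.namedNode('http://example.com/graph')
const data = rdf.clownface()
  .namedNode('http://example.com/foo')
  .addOut(rdf.ns.schema.name, 'Foo')

// put replaces the graph contents, post would append, get streams the graph back out
await client.store.put(data.dataset.toStream(), { graph })
const quads = await rdf.dataset().import(client.store.get(graph))
console.log(quads.size) // 1
```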