diff --git a/java/arcs/core/data/SchemaRegistry.kt b/java/arcs/core/data/SchemaRegistry.kt
new file mode 100644
index 00000000000..298f67b4d00
--- /dev/null
+++ b/java/arcs/core/data/SchemaRegistry.kt
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2020 Google LLC.
+ *
+ * This code may only be used under the BSD style license found at
+ * http://polymer.github.io/LICENSE.txt
+ *
+ * Code distributed by Google as part of this project is also subject to an additional IP rights
+ * grant found at
+ * http://polymer.github.io/PATENTS.txt
+ */
+package arcs.core.data
+
+typealias SchemaHash = String
+
+/**
+ * A registry for generated [Schema]s.
+ */
+object SchemaRegistry {
+    private val schemas = mutableMapOf<SchemaHash, Schema>()
+
+    /** Store a [Schema] in the registry. */
+    fun register(schema: Schema) {
+        schemas[schema.hash] = schema
+    }
+
+    /** Given a [SchemaHash], return the [Schema] for that hash, if it exists. */
+    operator fun get(hash: SchemaHash) = schemas[hash]
+}
diff --git a/java/arcs/core/data/proto/recipe.proto b/java/arcs/core/data/proto/recipe.proto
index b4ff66a211c..200b2938493 100644
--- a/java/arcs/core/data/proto/recipe.proto
+++ b/java/arcs/core/data/proto/recipe.proto
@@ -121,6 +121,7 @@ message TypeVariableProto {
 message SchemaProto {
   repeated string names = 1;
   map<string, TypeProto> fields = 2;
+  string hash = 3;
 }
 
 enum OPERATOR {
diff --git a/java/arcs/core/storage/api/BUILD b/java/arcs/core/storage/api/BUILD
index 77b44a15923..361d7552992 100644
--- a/java/arcs/core/storage/api/BUILD
+++ b/java/arcs/core/storage/api/BUILD
@@ -12,7 +12,7 @@ arcs_kt_library(
     srcs = glob(["*.kt"]),
     deps = [
         "//java/arcs/core/common",
-        "//java/arcs/core/data:rawentity",
+        "//java/arcs/core/data",
         "//java/arcs/core/data/util:data-util",
         "//third_party/kotlin/kotlinx_coroutines",
     ],
diff --git a/java/arcs/core/storage/api/Entity.kt b/java/arcs/core/storage/api/Entity.kt
index 5d0b4e3dac1..9f6472cfc1b 100644
--- a/java/arcs/core/storage/api/Entity.kt
+++ b/java/arcs/core/storage/api/Entity.kt
@@ -13,6 +13,7 @@ package arcs.core.storage.api
 
 import arcs.core.common.Referencable
 import arcs.core.data.RawEntity
+import arcs.core.data.Schema
 import arcs.core.data.util.ReferencablePrimitive
 import kotlin.IllegalArgumentException
 import kotlin.reflect.KClass
@@ -38,6 +39,9 @@
      * TODO: replace this with kotlinx.serialization
      */
    fun deserialize(data: RawEntity): T
+
+    /** Returns the corresponding [Schema] for the specified [Entity]. */
+    fun schema(): Schema
 }
 
 /**
diff --git a/src/tools/schema2base.ts b/src/tools/schema2base.ts
index 7687c6824c6..94bd25aac52 100644
--- a/src/tools/schema2base.ts
+++ b/src/tools/schema2base.ts
@@ -15,8 +15,16 @@ import {Runtime} from '../runtime/runtime.js';
 import {SchemaGraph, SchemaNode} from './schema2graph.js';
 import {ParticleSpec} from '../runtime/particle-spec.js';
 
+export type AddFieldOptions = Readonly<{
+  field: string;
+  typeChar: string;
+  isOptional?: boolean;
+  refClassName?: string;
+  isCollection?: boolean;
+}>;
+
 export interface ClassGenerator {
-  addField(field: string, typeChar: string, isOptional: boolean, refClassName: string|null): void;
+  addField(opts: AddFieldOptions): void;
   generate(schemaHash: string, fieldCount: number): string;
 }
 
@@ -78,14 +86,20 @@ export abstract class Schema2Base {
     for (const [field, descriptor] of fields) {
       if (descriptor.kind === 'schema-primitive') {
         if (['Text', 'URL', 'Number', 'Boolean'].includes(descriptor.type)) {
-          generator.addField(field, descriptor.type[0], false, null);
+          generator.addField({field, typeChar: descriptor.type[0]});
         } else {
           throw new Error(`Schema type '${descriptor.type}' for field '${field}' is not supported`);
         }
       } else if (descriptor.kind === 'schema-reference') {
-        generator.addField(field, 'R', false, node.refs.get(field).name);
+        generator.addField({field, typeChar: 'R', refClassName: node.refs.get(field).name});
       } else if (descriptor.kind === 'schema-collection' && descriptor.schema.kind === 'schema-reference') {
         // TODO: support collections of references
+      } else if (descriptor.kind === 'schema-collection') {
+        const schema = descriptor.schema;
+        if (!['Text', 'URL', 'Number', 'Boolean'].includes(schema.type)) {
+          throw new Error(`Schema type '${schema.type}' for field '${field}' is not supported`);
+        }
+        generator.addField({field, typeChar: schema.type[0], isCollection: true});
       } else {
         throw new Error(`Schema kind '${descriptor.kind}' for field '${field}' is not supported`);
       }
diff --git a/src/tools/schema2cpp.ts b/src/tools/schema2cpp.ts
index 1037b67948a..31853f5ef5d 100644
--- a/src/tools/schema2cpp.ts
+++ b/src/tools/schema2cpp.ts
@@ -7,7 +7,7 @@
  * subject to an additional IP rights grant found at
  * http://polymer.github.io/PATENTS.txt
  */
-import {Schema2Base, ClassGenerator} from './schema2base.js';
+import {Schema2Base, ClassGenerator, AddFieldOptions} from './schema2base.js';
 import {SchemaNode} from './schema2graph.js';
 import {ParticleSpec} from '../runtime/particle-spec.js';
 import {Type} from '../runtime/type.js';
@@ -113,7 +113,7 @@ class CppGenerator implements ClassGenerator {
 
   constructor(readonly node: SchemaNode, readonly namespace: string) {}
 
-  addField(field: string, typeChar: string, isOptional: boolean, refClassName: string|null) {
+  addField({field, typeChar, refClassName, isOptional = false, isCollection = false}: AddFieldOptions) {
     const fixed = fixName(field);
     const valid = `${field}_valid_`;
     let {type, defaultVal, isString} = typeMap[typeChar];
diff --git a/src/tools/schema2kotlin.ts b/src/tools/schema2kotlin.ts
index b0eeb92c654..60ab7b67a12 100644
--- a/src/tools/schema2kotlin.ts
+++ b/src/tools/schema2kotlin.ts
@@ -7,7 +7,7 @@
  * subject to an additional IP rights grant found at
  * http://polymer.github.io/PATENTS.txt
  */
-import {Schema2Base, ClassGenerator} from './schema2base.js';
+import {Schema2Base, ClassGenerator, AddFieldOptions} from './schema2base.js';
 import {SchemaNode} from './schema2graph.js';
 import {ParticleSpec} from '../runtime/particle-spec.js';
 import minimist from 'minimist';
@@ -28,10 +28,10 @@ const keywords = [
 ];
 
 const typeMap = {
-  'T': {type: 'String', decodeFn: 'decodeText()', defaultVal: `""`},
-  'U': {type: 'String', decodeFn: 'decodeText()', defaultVal: `""`},
-  'N': {type: 'Double', decodeFn: 'decodeNum()', defaultVal: '0.0'},
-  'B': {type: 'Boolean', decodeFn: 'decodeBool()', defaultVal: 'false'},
+  'T': {type: 'String', decodeFn: 'decodeText()', defaultVal: `""`, schemaType: 'FieldType.Text'},
+  'U': {type: 'String', decodeFn: 'decodeText()', defaultVal: `""`, schemaType: 'FieldType.Text'},
+  'N': {type: 'Double', decodeFn: 'decodeNum()', defaultVal: '0.0', schemaType: 'FieldType.Number'},
+  'B': {type: 'Boolean', decodeFn: 'decodeBool()', defaultVal: 'false', schemaType: 'FieldType.Boolean'},
 };
 
 export class Schema2Kotlin extends Schema2Base {
@@ -54,7 +54,14 @@ package ${this.scope}
 
 // Current implementation doesn't support references or optional field detection
 import arcs.sdk.*
-${this.opts.wasm ? 'import arcs.sdk.wasm.*' : 'import arcs.core.storage.api.toPrimitiveValue\nimport arcs.core.data.RawEntity\nimport arcs.core.data.util.toReferencable\nimport arcs.core.data.util.ReferencablePrimitive'}
+${this.opts.wasm ?
+  `import arcs.sdk.wasm.*` :
+  `\
+import arcs.sdk.Entity
+import arcs.core.data.*
+import arcs.core.data.util.toReferencable
+import arcs.core.data.util.ReferencablePrimitive
+import arcs.core.storage.api.toPrimitiveValue`}
 `;
   }
@@ -130,7 +137,7 @@ abstract class Abstract${particleName} : ${this.opts.wasm ? 'WasmParticleImpl' :
   }
 }
 
-class KotlinGenerator implements ClassGenerator {
+export class KotlinGenerator implements ClassGenerator {
   fields: string[] = [];
   fieldVals: string[] = [];
   setFields: string[] = [];
@@ -144,11 +151,13 @@ class KotlinGenerator implements ClassGenerator {
   fieldSerializes: string[] = [];
   fieldDeserializes: string[] = [];
   fieldsForToString: string[] = [];
+  singletonSchemaFields: string[] = [];
+  collectionSchemaFields: string[] = [];
 
   constructor(readonly node: SchemaNode, private readonly opts: minimist.ParsedArgs) {}
 
   // TODO: allow optional fields in kotlin
-  addField(field: string, typeChar: string, isOptional: boolean, refClassName: string|null) {
+  addField({field, typeChar, refClassName, isOptional = false, isCollection = false}: AddFieldOptions) {
     // TODO: support reference types in kotlin
     if (typeChar === 'R') return;
 
@@ -181,6 +190,46 @@ class KotlinGenerator implements ClassGenerator {
     this.fieldSerializes.push(`"${field}" to ${fixed}.toReferencable()`);
     this.fieldDeserializes.push(`${fixed} = data.singletons["${fixed}"].toPrimitiveValue(${type}::class, ${defaultVal})`);
     this.fieldsForToString.push(`${fixed} = $${fixed}`);
+    if (isCollection) {
+      this.collectionSchemaFields.push(`"${field}" to ${typeMap[typeChar].schemaType}`);
+    } else {
+      this.singletonSchemaFields.push(`"${field}" to ${typeMap[typeChar].schemaType}`);
+    }
+  }
+
+  mapOf(items: string[]): string {
+    switch (items.length) {
+      case 0:
+        return `emptyMap()`;
+      case 1:
+        return `mapOf(${items[0]})`;
+      default:
+        return `\
+mapOf(
+${this.leftPad(items.join(',\n'), 4)}
+)`;
+    }
+
+  }
+
+  createSchema(schemaHash: string): string {
+    const schemaNames = this.node.schema.names.map(n => `SchemaName("${n}")`);
+    return `\
+Schema(
+    listOf(${schemaNames.join(',\n' + ' '.repeat(8))}),
+    SchemaFields(
+        singletons = ${this.leftPad(this.mapOf(this.singletonSchemaFields), 8, true)},
+        collections = ${this.leftPad(this.mapOf(this.collectionSchemaFields), 8, true)}
+    ),
+    "${schemaHash}"
+)`;
+  }
+
+  leftPad(input: string, indent: number, skipFirst: boolean = false) {
+    return input
+      .split('\n')
+      .map((line: string, idx: number) => (idx === 0 && skipFirst) ? line : ' '.repeat(indent) + line)
+      .join('\n');
   }
 
   generate(schemaHash: string, fieldCount: number): string {
@@ -256,7 +305,18 @@ ${this.opts.wasm ? `
 }
 
 class ${name}_Spec() : ${this.getType('EntitySpec')}<${name}> {
 
+${this.opts.wasm ? '' : `\
+    companion object {
+        val schema = ${this.leftPad(this.createSchema(schemaHash), 8, true)}
+
+        init {
+            SchemaRegistry.register(schema)
+        }
+    }
+
+    override fun schema() = schema
+`}
     override fun create() = ${name}()
 ${!this.opts.wasm ? `
     override fun deserialize(data: RawEntity): ${name} {
diff --git a/src/tools/storage-key-recipe-resolver.ts b/src/tools/storage-key-recipe-resolver.ts
index 7032e274102..0ee9b8957b1 100644
--- a/src/tools/storage-key-recipe-resolver.ts
+++ b/src/tools/storage-key-recipe-resolver.ts
@@ -32,13 +32,14 @@
   /**
    * Produces resolved recipes with storage keys.
    *
-   * TODO(alxr): Apply to long-running recipes appropriately.
+   * TODO(#4818) Add passes to assign storage keys.
    * @throws Error if recipe fails to resolve on first or second pass.
    * @yields Resolved recipes with storage keys
    */
   async resolve(): Promise<Recipe[]> {
     const recipes = [];
     for (const recipe of this.runtime.context.allRecipes) {
+      this.validateHandles(recipe);
       const arc = this.runtime.newArc(this.getArcId(recipe), ramDiskStorageKeyPrefixForTest());
       const opts = {errors: new Map()};
       const resolved = await this.tryResolve(recipe, arc, opts);
@@ -63,7 +64,8 @@
    */
   async tryResolve(recipe: Recipe, arc: Arc, opts?: IsValidOptions): Promise<Recipe | null> {
     const normalized = recipe.clone();
-    normalized.normalize();
+    const successful = normalized.normalize(opts);
+    if (!successful) return null;
     if (normalized.isResolved()) return normalized;
     return await (new RecipeResolver(arc).resolve(recipe, opts));
   }
@@ -97,14 +99,14 @@
   }
 
   /**
-   * TODO(#4818) method to match `map` and `copy` fated handles with storage keys from `create` handles.
+   * Checks that handles are existent, disambiguous, and initiated by a long-running arc.
    *
-   * @throws when a mapped handle is associated with too many stores (ambiguous mapping).
-   * @throws when a mapped handle isn't associated with any store (no matches found).
-   * @throws when handle is mapped to a handle from an ephemeral recipe.
+   * @throws when a map or copy handle is associated with too many stores (ambiguous mapping).
+   * @throws when a map or copy handle isn't associated with any store (no matches found).
+   * @throws when a map or copy handle is associated with a handle from an ephemeral recipe.
    * @param recipe long-running or ephemeral recipe
    */
-  matchKeysToHandles(recipe: Recipe) {
+  validateHandles(recipe: Recipe) {
     recipe.handles
       .filter(h => h.fate === 'map' || h.fate === 'copy')
       .forEach(handle => {
@@ -121,8 +123,6 @@ export class StorageKeyRecipeResolver {
         if (!match.recipe.isLongRunning) {
           throw Error(`Handle ${handle.localName} mapped to ephemeral handle ${match.localName}.`);
         }
-
-        handle.storageKey = match.storageKey;
       });
   }
 }
diff --git a/src/tools/tests/goldens/generated-schemas.jvm.kt b/src/tools/tests/goldens/generated-schemas.jvm.kt
index e6c1626263f..0a36648cd14 100644
--- a/src/tools/tests/goldens/generated-schemas.jvm.kt
+++ b/src/tools/tests/goldens/generated-schemas.jvm.kt
@@ -9,10 +9,11 @@ package arcs.sdk
 
 // Current implementation doesn't support references or optional field detection
 import arcs.sdk.*
-import arcs.core.storage.api.toPrimitiveValue
-import arcs.core.data.RawEntity
+import arcs.sdk.Entity
+import arcs.core.data.*
 import arcs.core.data.util.toReferencable
 import arcs.core.data.util.ReferencablePrimitive
+import arcs.core.storage.api.toPrimitiveValue
 
 class GoldInternal1() : Entity {
 
@@ -71,6 +72,23 @@ class GoldInternal1() : Entity {
 
 class GoldInternal1_Spec() : EntitySpec<GoldInternal1> {
 
+    companion object {
+        val schema = Schema(
+            listOf(),
+            SchemaFields(
+                singletons = mapOf("val" to FieldType.Text),
+                collections = emptyMap()
+            ),
+            "485712110d89359a3e539dac987329cd2649d889"
+        )
+
+        init {
+            SchemaRegistry.register(schema)
+        }
+    }
+
+    override fun schema() = schema
+
     override fun create() = GoldInternal1()
 
     override fun deserialize(data: RawEntity): GoldInternal1 {
@@ -179,6 +197,28 @@ class Gold_Data() : Entity {
 
 class Gold_Data_Spec() : EntitySpec<Gold_Data> {
 
+    companion object {
+        val schema = Schema(
+            listOf(),
+            SchemaFields(
+                singletons = mapOf(
+                    "num" to FieldType.Number,
+                    "txt" to FieldType.Text,
+                    "lnk" to FieldType.Text,
+                    "flg" to FieldType.Boolean
+                ),
+                collections = emptyMap()
+            ),
+            "d8058d336e472da47b289eafb39733f77eadb111"
+        )
+
+        init {
+            SchemaRegistry.register(schema)
+        }
+    }
+
+    override fun schema() = schema
+
     override fun create() = Gold_Data()
 
     override fun deserialize(data: RawEntity): Gold_Data {
diff --git a/src/tools/tests/schema2kotlin-test.ts b/src/tools/tests/schema2kotlin-test.ts
new file mode 100644
index 00000000000..fd940158667
--- /dev/null
+++ b/src/tools/tests/schema2kotlin-test.ts
@@ -0,0 +1,41 @@
+/**
+ * @license
+ * Copyright (c) 2020 Google Inc. All rights reserved.
+ * This code may only be used under the BSD style license found at
+ * http://polymer.github.io/LICENSE.txt
+ * Code distributed by Google as part of this project is also
+ * subject to an additional IP rights grant found at
+ * http://polymer.github.io/PATENTS.txt
+ */
+
+
+import {assert} from '../../platform/chai-node.js';
+import {KotlinGenerator} from '../schema2kotlin.js';
+import {SchemaNode} from '../schema2graph.js';
+import {Schema} from '../../runtime/schema.js';
+
+
+describe('schema2wasm', () => {
+  describe('kotlin-generator', () => {
+    const ktGen = new KotlinGenerator(new SchemaNode(new Schema([], {}), 'dummyNode'), {arg: '', _: []});
+    it('when no items are present, it creates an empty map', () => {
+      const actual = ktGen.mapOf([]);
+
+      assert.strictEqual('emptyMap()', actual);
+    });
+    it('when one item is present, it creates a single-line map', () => {
+      const actual = ktGen.mapOf([`"a" to "b"`]);
+
+      assert.strictEqual('mapOf("a" to "b")', actual);
+    });
+    it('when multiple items are present, it creates a multi-line map', () => {
+      const actual = ktGen.mapOf([`"a" to "b"`, `"b" to "c"`]);
+
+      assert.strictEqual(`\
+mapOf(
+    "a" to "b",
+    "b" to "c"
+)`, actual);
+    });
+  });
+});
diff --git a/src/tools/tests/schema2wasm-test.ts b/src/tools/tests/schema2wasm-test.ts
index e0dca1fde55..757bbca5df5 100644
--- a/src/tools/tests/schema2wasm-test.ts
+++ b/src/tools/tests/schema2wasm-test.ts
@@ -9,9 +9,8 @@
  */
 import {assert} from '../../platform/chai-web.js';
 import {Manifest} from '../../runtime/manifest.js';
-import {Schema} from '../../runtime/schema.js';
 import {Dictionary} from '../../runtime/hot.js';
-import {Schema2Base, ClassGenerator} from '../schema2base.js';
+import {Schema2Base, ClassGenerator, AddFieldOptions} from '../schema2base.js';
 import {SchemaNode} from '../schema2graph.js';
 import {Schema2Cpp} from '../schema2cpp.js';
 import {Schema2Kotlin} from '../schema2kotlin.js';
@@ -32,7 +31,7 @@ class Schema2Mock extends Schema2Base {
     const collector = {count: 0, adds: []};
     this.res[node.name] = collector;
     return {
-      addField(field: string, typeChar: string, isOptional: boolean, refClassName: string|null) {
+      addField({field, typeChar, isOptional, refClassName}: AddFieldOptions) {
        const refInfo = refClassName ? `<${refClassName}>` : '';
        collector.adds.push(field + ':' + typeChar + refInfo + (isOptional ? '?' : ''));
       },
diff --git a/src/tools/tests/storage-key-recipe-resolver-test.ts b/src/tools/tests/storage-key-recipe-resolver-test.ts
index 83fe5083db2..7468696d27d 100644
--- a/src/tools/tests/storage-key-recipe-resolver-test.ts
+++ b/src/tools/tests/storage-key-recipe-resolver-test.ts
@@ -11,10 +11,11 @@
 import {Manifest} from '../../runtime/manifest.js';
 import {assert} from '../../platform/chai-node.js';
 import {StorageKeyRecipeResolver} from '../storage-key-recipe-resolver.js';
+import {assertThrowsAsync} from '../../testing/test-util.js';
 
 describe('recipe2plan', () => {
   describe('storage-key-recipe-resolver', () => {
-    it('Resolves mapping a handle from a long running arc into another long running arc', async () => {
+    it('resolves mapping a handle from a long running arc into another long running arc', async () => {
       const manifest = await Manifest.parse(`\
     particle Reader
       data: reads Thing {name: Text}
@@ -28,7 +29,7 @@ describe('recipe2plan', () => {
 
     @trigger
     launch startup
-    arcId myArcId
+    arcId writeArcId
     recipe WritingRecipe
       thing: create persistent 'my-handle-id'
       Writer
@@ -36,25 +37,126 @@ describe('recipe2plan', () => {
 
     @trigger
     launch startup
-    arcId otherArcId
+    arcId readArcId
     recipe ReadingRecipe
       data: map 'my-handle-id'
       Reader
        data: reads data`);
 
       const resolver = new StorageKeyRecipeResolver(manifest);
-      for (const it of (await resolver.resolve())) {
+      for (const it of (await resolver.resolve())) {
         assert.isTrue(it.isResolved());
       }
     });
+    it('fails to resolve mapping a handle from a short running arc into another short running arc', async () => {
+      const manifest = await Manifest.parse(`\
+    particle Reader
+      data: reads Thing {name: Text}
+
+    particle Writer
+      data: writes Thing {name: Text}
+
+    recipe WritingRecipe
+      thing: create persistent 'my-handle-id'
+      Writer
+        data: writes thing
+
+    recipe ReadingRecipe
+      data: map 'my-handle-id'
+      Reader
+        data: reads data`);
+
+      const resolver = new StorageKeyRecipeResolver(manifest);
+      await assertThrowsAsync(async () => await resolver.resolve(), Error, 'Handle data mapped to ephemeral handle thing.');
+    });
+    it('fails to resolve mapping a handle from a short running arc into a long running arc', async () => {
+      const manifest = await Manifest.parse(`\
+    particle Reader
+      data: reads Thing {name: Text}
+
+    particle Writer
+      data: writes Thing {name: Text}
+
+    recipe WritingRecipe
+      thing: create persistent 'my-handle-id'
+      Writer
+        data: writes thing
+
+    @trigger
+    launch startup
+    arcId readArcId
+    recipe ReadingRecipe
+      data: map 'my-handle-id'
+      Reader
+        data: reads data`);
+
+      const resolver = new StorageKeyRecipeResolver(manifest);
+      await assertThrowsAsync(async () => await resolver.resolve(), Error, 'Handle data mapped to ephemeral handle thing.');
+    });
+    it('resolves mapping a handle from a long running arc into a short running arc', async () => {
+      const manifest = await Manifest.parse(`\
+    particle Reader
+      data: reads Thing {name: Text}
+
+    particle Writer
+      data: writes Thing {name: Text}
+
+    @trigger
+    launch startup
+    arcId writeArcId
+    recipe WritingRecipe
+      thing: create persistent 'my-handle-id'
+      Writer
+        data: writes thing
+
+    recipe ReadingRecipe
+      data: map 'my-handle-id'
+      Reader
+        data: reads data`);
+
+      const resolver = new StorageKeyRecipeResolver(manifest);
+      for (const it of await resolver.resolve()) {
+        assert.isTrue(it.isResolved());
+      }
+    });
+    it('Invalid Type: If Reader reads {name: Text, age: Number} it is not valid', async () => {
+      const manifest = await Manifest.parse(`\
+    particle Reader
+      data: reads Thing {name: Text, age: Number}
+
+    particle Writer
+      data: writes Thing {name: Text}
+
+    @trigger
+    launch startup
+    arcId writeArcId
+    recipe WritingRecipe
+      thing: create persistent 'my-handle-id'
+      Writer
+        data: writes thing
+
+    @trigger
+    launch startup
+    arcId readArcId
+    recipe ReadingRecipe
+      data: map 'my-handle-id'
+      Reader
+        data: reads data`);
+
+      const resolver = new StorageKeyRecipeResolver(manifest);
+      // TODO: specify the correct error to be thrown
+      await assertThrowsAsync(resolver.resolve);
+    });
     // TODO(alxr): Flush out outlined unit tests
-    it.skip('Short + Short: If WritingRecipe is short lived, it is not valid', () => {});
-    it.skip('Short + Long: If WritingRecipe is short lived and Reading is long lived, it is not valid', () => {});
-    it.skip('Invalid Type: If Reader reads {name: Text, age: Number} it is not valid', () => {});
-    it.skip('No arc id: If arcId of WritingRecipe is not there, it is not valid', () => {});
-    it.skip('No handleId: If id of handle in WritingRecipe is not provided, it is not valid', () => {});
-    it.skip('Ambiguous handle: If there are 2 WritingRecipes creating the same handle, it is not valid', () => {});
-    it.skip('Ambiguous handle + tag disambiguation: If there are 2 WritingRecipes creating the same handle but with different tags and mapping uses one of the tags, it is valid', () => {});
-    it.skip('No Handle: If there is no writing handle, it is not valid', () => {});
+    it.skip('No arc id: If arcId of WritingRecipe is not there, it is not valid', () => {
+    });
+    it.skip('No handleId: If id of handle in WritingRecipe is not provided, it is not valid', () => {
+    });
+    it.skip('Ambiguous handle: If there are 2 WritingRecipes creating the same handle, it is not valid', () => {
+    });
+    it.skip('Ambiguous handle + tag disambiguation: If there are 2 WritingRecipes creating the same handle but with different tags and mapping uses one of the tags, it is valid', () => {
+    });
+    it.skip('No Handle: If there is no writing handle, it is not valid', () => {
+    });
   });
 });
diff --git a/src/wasm/cpp/README.md b/src/wasm/cpp/README.md
index 716fc935c1e..d0c17fca5c8 100644
--- a/src/wasm/cpp/README.md
+++ b/src/wasm/cpp/README.md
@@ -54,4 +54,4 @@ See [here](../../../particles/Native/Wasm) for a working example.
 
 # Test
 
-`./tools/bazelisk test //src/wasm:wasm-api-test`
+`./tools/bazelisk test //src/wasm/tests:wasm-api-test`
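Usage sketch (illustrative only, not part of the diff): the snippet below shows how a generated spec's companion object is expected to interact with the new `SchemaRegistry`, reusing the `Schema`, `SchemaFields`, and `FieldType` constructor shapes and the hash value visible in the generated golden above; the `main` wrapper is an assumption for demonstration.

```kotlin
import arcs.core.data.FieldType
import arcs.core.data.Schema
import arcs.core.data.SchemaFields
import arcs.core.data.SchemaRegistry

fun main() {
    // Mirrors what the generated companion object's init block does:
    // build the Schema once and register it under its hash.
    val schema = Schema(
        listOf(),
        SchemaFields(
            singletons = mapOf("val" to FieldType.Text),
            collections = emptyMap()
        ),
        "485712110d89359a3e539dac987329cd2649d889"
    )
    SchemaRegistry.register(schema)

    // Any code that only holds the SchemaHash can now recover the full Schema.
    val recovered = SchemaRegistry["485712110d89359a3e539dac987329cd2649d889"]
    println(recovered === schema) // true: the registry returns the registered instance
}
```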