diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 03c88726..d8867d18 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -12,22 +12,24 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 + - uses: actions/setup-node@v1 + with: + node-version: 16 - run: npm install + - run: npm run build - run: npm run lint - # or - # - uses: gozala/typescript-error-reporter-action@v1.0.8 - - run: npx aegir build - - run: npx aegir dep-check - - uses: ipfs/aegir/actions/bundle-size@master - with: - github_token: ${{ secrets.GITHUB_TOKEN }} + - run: npm run depcheck test-node: needs: check runs-on: ${{ matrix.os }} + name: Test ${{ matrix.project }} node strategy: matrix: os: [windows-latest, ubuntu-latest, macos-latest] - node: [14, 16] + node: [16] + project: + - ipfs-repo + - ipfs-repo-migrations fail-fast: true steps: - uses: actions/checkout@v2 @@ -35,44 +37,49 @@ jobs: with: node-version: ${{ matrix.node }} - run: npm install - - run: npx aegir test -t node --bail --cov - - uses: codecov/codecov-action@v1 - test-chrome: - needs: check - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: microsoft/playwright-github-action@v1 - - run: npm install - - run: npx aegir test -t browser -t webworker --bail - - uses: codecov/codecov-action@v1 - test-firefox: - needs: check - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: microsoft/playwright-github-action@v1 - - run: npm install - - run: npx aegir test -t browser -t webworker --bail -- --browser firefox - test-webkit: + - run: npm run test -- --scope=${{ matrix.project }} -- -- --cov -t node + test-browser: needs: check runs-on: ubuntu-latest + name: test ${{ matrix.project }} ${{ matrix.browser }} ${{ matrix.type }} + strategy: + matrix: + project: + - ipfs-repo +# - ipfs-repo-migrations + browser: + - chromium + - firefox + type: + - browser + - webworker + fail-fast: true steps: - uses: actions/checkout@v2 - - uses: microsoft/playwright-github-action@v1 + - uses: actions/setup-node@v1 + with: + node-version: 16 - run: npm install - - run: npx aegir test -t browser -t webworker --bail -- --browser webkit - # test-electron-main: - # needs: check - # runs-on: ubuntu-latest - # steps: - # - uses: actions/checkout@v2 - # - run: npm install - # - run: npx xvfb-maybe aegir test -t electron-main --bail - # test-electron-renderer: - # needs: check - # runs-on: ubuntu-latest - # steps: - # - uses: actions/checkout@v2 - # - run: npm install - # - run: npx xvfb-maybe aegir test -t electron-renderer --bail + - run: npm run test -- --scope=${{ matrix.project }} -- -- -t ${{ matrix.type }} -- --browser ${{ matrix.browser }} +# test-electron: +# needs: check +# runs-on: ubuntu-latest +# name: test ${{ matrix.project }} ${{ matrix.type }} +# strategy: +# matrix: +# project: +# - ipfs-repo +# - ipfs-repo-migrations +# type: +# - electron-main +# - electron-renderer +# fail-fast: true +# steps: +# - uses: actions/checkout@v2 +# - uses: actions/setup-node@v1 +# with: +# node-version: 16 +# - run: npm install +# - uses: GabrielBB/xvfb-action@v1 +# with: +# run: npm run test -- --scope=${{ matrix.project }} -- -- -t ${{ matrix.type }} --bail diff --git a/.gitignore b/.gitignore index 36eff293..b9eda9fb 100644 --- a/.gitignore +++ b/.gitignore @@ -36,4 +36,4 @@ build node_modules dist - +types diff --git a/LICENSE-APACHE b/LICENSE-APACHE new file mode 100644 index 00000000..14478a3b --- /dev/null +++ b/LICENSE-APACHE @@ -0,0 +1,5 @@ +Licensed under the Apache License, 
Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. diff --git a/LICENSE b/LICENSE-MIT similarity index 89% rename from LICENSE rename to LICENSE-MIT index 5ed36143..749aa1ec 100644 --- a/LICENSE +++ b/LICENSE-MIT @@ -1,7 +1,5 @@ The MIT License (MIT) -Copyright (c) 2015 IPFS - Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights @@ -9,14 +7,13 @@ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
\ No newline at end of file diff --git a/README.md b/README.md index 185a659d..8c55072d 100644 --- a/README.md +++ b/README.md @@ -1,16 +1,13 @@ -# IPFS Repo JavaScript Implementation +# ipfs-repo [![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://ipn.io) [![](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io/) [![](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23ipfs) -[![standard-readme compliant](https://img.shields.io/badge/standard--readme-OK-green.svg?style=flat-square)](https://github.com/RichardLitt/standard-readme) -[![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs-repo)](https://travis-ci.com/ipfs/js-ipfs-repo) -[![codecov](https://codecov.io/gh/ipfs/js-ipfs-repo/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-repo) [![Dependency Status](https://david-dm.org/ipfs/js-ipfs-repo.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-repo) -[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard) +[![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs-repo)](https://travis-ci.com/ipfs/js-ipfs-repo) +[![Codecov](https://codecov.io/gh/ipfs/js-ipfs-repo/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-repo) +[![Style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard) -> Implementation of the IPFS repo spec (https://github.com/ipfs/specs/blob/master/REPO.md) in JavaScript - -This is the implementation of the [IPFS repo spec](https://github.com/ipfs/specs/blob/master/REPO.md) in JavaScript. 
+> The repository where blocks are stored and a tool to perform migrations between different versions ## Lead Maintainer @@ -18,412 +15,50 @@ This is the implementation of the [IPFS repo spec](https://github.com/ipfs/specs ## Table of Contents -- [Background](#background) -- [Install](#install) - - [npm](#npm) - - [Use in Node.js](#use-in-nodejs) - - [Use in a browser with browserify, webpack or any other bundler](#use-in-a-browser-with-browserify-webpack-or-any-other-bundler) - - [Use in a browser Using a script tag](#use-in-a-browser-using-a-script-tag) -- [Usage](#usage) -- [API](#api) - - [Setup](#setup) - - [`new Repo(path[, options])`](#new-repopath-options) - - [`Promise repo.init()`](#promise-repoinit) - - [`Promise repo.open()`](#promise-repoopen) - - [`Promise repo.close()`](#promise-repoclose) - - [`Promise repo.exists()`](#promiseboolean-repoexists) - - [`Promise repo.isInitialized()`](#promiseboolean-repoisinitialized) - - [Repos](#repos) - - [`Promise repo.put(key, value:Uint8Array)`](#promise-repoputkey-valueuint8array) - - [`Promise repo.get(key)`](#promiseuint8array-repogetkey) - - [Blocks](#blocks) - - [`Promise repo.blocks.put(block:Block)`](#promiseblock-repoblocksputblockblock) - - [`AsyncIterator repo.blocks.putMany(source:AsyncIterable)`](#asynciteratorblock-repoblocksputmanysourceasynciterableblock) - - [`Promise repo.blocks.get(cid:CID)`](#promiseblock-repoblocksgetcidcid) - - [`AsyncIterable repo.blocks.getMany(source:AsyncIterable)`](#asynciterableblock-repoblocksgetmanysourceasynciterablecid) - - [`Promise repo.blocks.has (cid:CID)`](#promiseboolean-repoblockshas-cidcid) - - [`Promise repo.blocks.delete (cid:CID)`](#promiseboolean-repoblocksdelete-cidcid) - - [`AsyncIterator repo.blocks.query (query)`](#asynciteratorblockcid-repoblocksquery-query) - - [`Promise repo.blocks.delete(cid:CID)`](#promisecid-repoblocksdeletecidcid) - - [`AsyncIterator repo.blocks.deleteMany(source:AsyncIterable)`](#asynciteratorcid-repoblocksdeletemanysourceasynciterablecid) - - [Datastore](#datastore) - - [`repo.datastore`](#repodatastore) - - [Config](#config) - - [`Promise repo.config.set(key:String, value:Object)`](#promise-repoconfigsetkeystring-valueobject) - - [`Promise repo.config.replace(value:Object)`](#promise-repoconfigreplacevalueobject) - - [`Promise repo.config.get(key:String)`](#promise-repoconfiggetkeystring) - - [`Promise repo.config.getAll()`](#promiseobject-repoconfiggetall) - - [`Promise repo.config.exists()`](#promiseboolean-repoconfigexists) - - [Version](#version) - - [`Promise repo.version.get()`](#promisenumber-repoversionget) - - [`Promise repo.version.set (version:Number)`](#promise-repoversionset-versionnumber) - - [API Addr](#api-addr) - - [`Promise repo.apiAddr.get()`](#promisestring-repoapiaddrget) - - [`Promise repo.apiAddr.set(value)`](#promise-repoapiaddrsetvalue) - - [Status](#status) - - [`Promise repo.stat()`](#promiseobject-repostat) - - [Lock](#lock) - - [`Promise lock.lock(dir)`](#promise-locklockdir) - - [`Promise closer.close()`](#promise-closerclose) - - [`Promise lock.locked(dir)`](#promiseboolean-locklockeddir) -- [Notes](#notes) - - [Migrations](#migrations) +- [Structure](#structure) +- [Development](#development) + - [Publishing new versions](#publishing-new-versions) + - [Using prerelease versions](#using-prerelease-versions) - [Contribute](#contribute) - [License](#license) -## Background - -Here is the architectural reasoning for this repo: - -```bash - ┌────────────────────────────────────────┐ - │ IPFSRepo │ - 
└────────────────────────────────────────┘ - ┌─────────────────┐ - │ / │ - ├─────────────────┤ - │ Datastore │ - └─────────────────┘ - ┌───────────┴───────────┐ - ┌─────────────────┐ ┌─────────────────┐ - │ /blocks │ │ /datastore │ - ├─────────────────┤ ├─────────────────┤ - │ Datastore │ │ LevelDatastore │ - └─────────────────┘ └─────────────────┘ - -┌────────────────────────────────────────┐ ┌────────────────────────────────────────┐ -│ IPFSRepo - Default Node.js │ │ IPFSRepo - Default Browser │ -└────────────────────────────────────────┘ └────────────────────────────────────────┘ - ┌─────────────────┐ ┌─────────────────┐ - │ / │ │ / │ - ├─────────────────┤ ├─────────────────┤ - │ FsDatastore │ │ IdbDatastore │ - └─────────────────┘ └─────────────────┘ - ┌───────────┴───────────┐ ┌───────────┴───────────┐ -┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ ┌─────────────────┐ -│ /blocks │ │ /datastore │ │ /blocks │ │ /datastore │ -├─────────────────┤ ├─────────────────┤ ├─────────────────┤ ├─────────────────┤ -│ FlatfsDatastore │ │LevelDBDatastore │ │ IdbDatastore │ │ IdbDatastore │ -└─────────────────┘ └─────────────────┘ └─────────────────┘ └─────────────────┘ -``` - -This provides a well defined interface for creating and interacting with an IPFS repo. - -## Install - -### npm - -```sh -> npm install ipfs-repo -``` - -### Use in Node.js - -```js -var IPFSRepo = require('ipfs-repo') -``` - -### Use in a browser with browserify, webpack or any other bundler - -```js -var IPFSRepo = require('ipfs-repo') -``` - -### Use in a browser Using a script tag - -Loading this module through a script tag will make the `IpfsRepo` obj available in the global namespace. - -```html - -``` - -## Usage - -Example: - -```js -const Repo = require('ipfs-repo') -const repo = new Repo('/tmp/ipfs-repo') - -await repo.init({ cool: 'config' }) -await repo.open() -console.log('repo is ready') -``` - -This now has created the following structure, either on disk or as an in memory representation: - -``` -├── blocks -│   ├── SHARDING -│ └── _README -├── config -├── datastore -├── keys -└── version -``` - -## API - -### Setup - -#### `new Repo(path[, options])` - -Creates an IPFS Repo. - -Arguments: - -* `path` (string, mandatory): the path for this repo -* `options` (object, optional): may contain the following values - * `autoMigrate` (bool, defaults to `true`): controls automatic migrations of repository. - * `onMigrationProgress` (function(version, percentComplete, message)): callback function to be notified of migration progress - * `lock` ([Lock](#lock) or string *Deprecated*): what type of lock to use. Lock has to be acquired when opening. string can be `"fs"` or `"memory"`. - * `storageBackends` (object, optional): may contain the following values, which should each be a class implementing the [datastore interface](https://github.com/ipfs/interface-datastore#readme): - * `root` (defaults to [`datastore-fs`](https://github.com/ipfs/js-datastore-fs#readme) in Node.js and [`datastore-level`](https://github.com/ipfs/js-datastore-level#readme) in the browser). Defines the back-end type used for gets and puts of values at the root (`repo.set()`, `repo.get()`) - * `blocks` (defaults to [`datastore-fs`](https://github.com/ipfs/js-datastore-fs#readme) in Node.js and [`datastore-level`](https://github.com/ipfs/js-datastore-level#readme) in the browser). Defines the back-end type used for gets and puts of values at `repo.blocks`. 
- * `keys` (defaults to [`datastore-fs`](https://github.com/ipfs/js-datastore-fs#readme) in Node.js and [`datastore-level`](https://github.com/ipfs/js-datastore-level#readme) in the browser). Defines the back-end type used for gets and puts of encrypted keys at `repo.keys` - * `datastore` (defaults to [`datastore-level`](https://github.com/ipfs/js-datastore-level#readme)). Defines the back-end type used as the key-value store used for gets and puts of values at `repo.datastore`. - -```js -const repo = new Repo('path/to/repo') -``` - -#### `Promise repo.init()` - -Creates the necessary folder structure inside the repo - -#### `Promise repo.open()` - -[Locks](https://en.wikipedia.org/wiki/Record_locking) the repo to prevent conflicts arising from simultaneous access - -#### `Promise repo.close()` - -Unlocks the repo. - -#### `Promise repo.exists()` - -Tells whether this repo exists or not. Returned promise resolves to a `boolean` - -#### `Promise repo.isInitialized()` - -The returned promise resolves to `false` if the repo has not been initialized and `true` if it has - -### Repos - -Root repo: - -#### `Promise repo.put(key, value:Uint8Array)` - -Put a value at the root of the repo - -* `key` can be a Uint8Array, a string or a [Key][] - -#### `Promise repo.get(key)` - -Get a value at the root of the repo - -* `key` can be a Uint8Array, a string or a [Key][] - -### Blocks - -#### `Promise repo.blocks.put(block:Block)` - -* `block` should be of type [Block][] - -#### `AsyncIterator repo.blocks.putMany(source:AsyncIterable)` - -Put many blocks. - -* `source` should be an AsyncIterable that yields entries of type [Block][] - -#### `Promise repo.blocks.get(cid:CID)` - -Get block. - -* `cid` is the content id of type [CID][] - -#### `AsyncIterable repo.blocks.getMany(source:AsyncIterable)` - -Get many blocks - -* `source` should be an AsyncIterable that yields entries of type [CID][] - -#### `Promise repo.blocks.has (cid:CID)` - -Indicate if a block is present for the passed CID +## Structure -* `cid` should be of the type [CID][] +This project is broken into several modules; their purposes are: -#### `Promise repo.blocks.delete (cid:CID)` +* [`/packages/ipfs-repo`](./packages/ipfs-repo) The repo implementation +* [`/packages/ipfs-repo-migrations`](./packages/ipfs-repo-migrations) A tool for migrating between different repo versions -Deletes a block +## Development -* `cid` should be of the type [CID][] +1. Clone this repo +2. Run `npm install` -#### `AsyncIterator repo.blocks.query (query)` +This will install [lerna](https://www.npmjs.com/package/lerna) and bootstrap the various packages, deduping and hoisting dependencies into the root folder. -Query what blocks are available in blockstore. +If later you wish to remove all the `node_modules`/`dist` folders and start again, run `npm run reset && npm install` from the root. -If `query.keysOnly` is true, the returned iterator will yield [CID][]s, otherwise it will yield [Block][]s +See the scripts section of the root [`package.json`](./package.json) for more commands. -* `query` is a object as specified in [interface-datastore](https://github.com/ipfs/interface-datastore#query). +### Publishing new versions -Datastore: +1. Ensure you have a `GH_TOKEN` env var containing a GitHub [Personal Access Token](https://github.com/settings/tokens) with `public_repo` permissions +2. From the root of this repo run `npm run release` and follow the on-screen prompts. 
It will use [conventional commits](https://www.conventionalcommits.org) to work out the new package version -#### `Promise repo.blocks.delete(cid:CID)` +### Using prerelease versions -* `cid` should be of the type [CID][] +Any changed packages from each successful build of master are published to npm as canary builds under the npm tag `next`. -Delete a block - -#### `AsyncIterator repo.blocks.deleteMany(source:AsyncIterable)` - -* `source` should be an Iterable or AsyncIterable that yields entries of the type [CID][] - -Delete many blocks - -### Datastore - -#### `repo.datastore` - -This contains a full implementation of [the `interface-datastore` API](https://github.com/ipfs/interface-datastore#api). - -### Config - -Instead of using `repo.set('config')` this exposes an API that allows you to set and get a decoded config object, as well as, in a safe manner, change any of the config values individually. - -#### `Promise repo.config.set(key:String, value:Object)` - -Set a config value. `value` can be any object that is serializable to JSON. - -* `key` is a string specifying the object path. Example: - -```js -await repo.config.set('a.b.c', 'c value') -const config = await repo.config.get() -assert.equal(config.a.b.c, 'c value') -``` - -#### `Promise repo.config.replace(value:Object)` - -Set the whole config value. `value` can be any object that is serializable to JSON. - -#### `Promise repo.config.get(key:String)` - -Get a config value. Returned promise resolves to the same type that was set before. - -* `key` is a string specifying the object path. Example: - -```js -const value = await repo.config.get('a.b.c') -console.log('config.a.b.c = ', value) -``` - -#### `Promise repo.config.getAll()` - -Get the entire config value. - -#### `Promise repo.config.exists()` - -Whether the config sub-repo exists. - -### Version - -#### `Promise repo.version.get()` - -Gets the repo version (an integer). - -#### `Promise repo.version.set (version:Number)` - -Sets the repo version - -### API Addr - -#### `Promise repo.apiAddr.get()` - -Gets the API address. - -#### `Promise repo.apiAddr.set(value)` - -Sets the API address. - -* `value` should be a [Multiaddr][] or a String representing a valid one. - -### Status - -#### `Promise repo.stat()` - -Gets the repo status. - -Returned promise resolves to an `Object` with the following keys: - -- `numObjects` -- `repoPath` -- `repoSize` -- `version` -- `storageMax` - -### Lock - -IPFS Repo comes with two built in locks: memory and fs. These can be imported via the following: - -```js -const fsLock = require('ipfs-repo/src/lock') // Default in Node.js -const memoryLock = require('ipfs-repo/src/lock-memory') // Default in browser -``` - -You can also provide your own custom Lock. It must be an object with the following interface: - -#### `Promise lock.lock(dir)` - -Sets the lock if one does not already exist. If a lock already exists, should throw an error. - -`dir` is a string to the directory the lock should be created at. The repo typically creates the lock at its root. - -Returns `closer`, where `closer` has a `close` method for removing the lock. - -#### `Promise closer.close()` - -Closes the lock created by `lock.open` - -If no error was thrown, the lock was successfully removed. - -#### `Promise lock.locked(dir)` - -Checks the existence of the lock. - -`dir` is the path to the directory to check for the lock. The repo typically checks for the lock at its root. - -Returned promise resolves to a `boolean` indicating the existence of the lock. 
- -## Notes - -- [Explanation of how repo is structured](https://github.com/ipfs/js-ipfs-repo/pull/111#issuecomment-279948247) - -### Migrations - -When there is a new repo migration and the version of the repo is increased, don't -forget to propagate the changes into the test repo (`test/test-repo`). - -**For tools that run mainly in the browser environment, be aware that disabling automatic -migrations leaves the user with no way to run the migrations because there is no CLI in the browser. In such -a case, you should provide a way to trigger migrations manually.** +Canary builds only consider changes to packages in the last built commit so changes to the root config files should not result in new prereleases being published to npm. ## Contribute -There are some ways you can make this module better: - -- Consult our [open issues](https://github.com/ipfs/js-ipfs-repo/issues) and take on one of them -- Help our tests reach 100% coverage! +Feel free to join in. All welcome. Open an [issue](https://github.com/ipfs/js-ipfs-repo/issues)! This repository falls under the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md). -[![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/CONTRIBUTING.md) +[![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/contributing.md) ## License -[MIT](LICENSE) - -[CID]: https://github.com/multiformats/js-cid -[Key]: https://github.com/ipfs/interface-datastore#keys -[Block]: https://github.com/ipld/js-ipld-block -[Multiaddr]: https://github.com/multiformats/js-multiaddr +[Apache-2.0](LICENSE-APACHE) OR [MIT](LICENSE-MIT) diff --git a/lerna.json b/lerna.json new file mode 100644 index 00000000..bf001c38 --- /dev/null +++ b/lerna.json @@ -0,0 +1,21 @@ +{ + "lerna": "3.22.1", + "packages": [ + "packages/*" + ], + "version": "independent", + "command": { + "bootstrap": { + "hoist": true + }, + "run": { + "stream": true + }, + "publish": { + "message": "chore: publish", + "conventionalCommits": true, + "createRelease": "github", + "verifyAccess": false + } + } +} diff --git a/package.json b/package.json index 8a9e2171..b988be9f 100644 --- a/package.json +++ b/package.json @@ -1,106 +1,30 @@ { "name": "ipfs-repo", - "version": "12.0.0", - "description": "IPFS Repo implementation", - "leadMaintainer": "Alex Potsides ", - "main": "src/index.js", - "types": "dist/src/index.d.ts", - "tsd": { - "directory": "test" - }, - "files": [ - "src", - "dist" - ], - "browser": { - "rimraf": false, - "datastore-fs": "datastore-level", - "./src/locks/fs.js": "./src/locks/memory.js", - "./src/default-options.js": "./src/default-options.browser.js" - }, + "version": "1.0.0", + "description": "The repo and migration tools used by IPFS", "scripts": { - "prepare": "aegir build --no-bundle", - "test": "tsd && aegir test", - "test:node": "aegir test -t node", - "test:browser": "aegir test -t browser", - "test:webworker": "aegir test -t webworker", - "build": "aegir build", - "lint": "aegir ts -p check && aegir lint", - "release": "aegir release", - "release-minor": "aegir release --type minor", - "release-major": "aegir release --type major", - "coverage": "aegir test -t node --cov && nyc report --reporter=html", - "dep-check": "aegir dep-check", - 
"docs": "aegir docs" - }, - "repository": { - "type": "git", - "url": "https://github.com/ipfs/js-ipfs-repo.git" - }, - "keywords": [ - "IPFS", - "libp2p", - "datastore" - ], - "homepage": "https://github.com/ipfs/js-ipfs-repo", - "engines": { - "node": ">=14.0.0", - "npm": ">=6.0.0" + "reset": "lerna run clean && rimraf packages/*/node_modules node_modules packages/*/package-lock.json package-lock.json", + "test": "lerna run test", + "coverage": "lerna run coverage", + "prepare": "lerna run prepare", + "build": "lerna run build", + "clean": "lerna run clean", + "lint": "lerna run lint", + "depcheck": "lerna run depcheck", + "release": "npm run update-contributors && lerna publish", + "release:rc": "lerna publish --canary --preid rc --dist-tag next", + "update-contributors": "aegir release --lint=false --test=false --bump=false --build=false --changelog=false --commit=false --tag=false --push=false --ghrelease=false --docs=false --publish=false" }, "devDependencies": { - "@ipld/dag-cbor": "^6.0.4", - "@types/bytes": "^3.1.0", - "@types/debug": "^4.1.5", - "@types/proper-lockfile": "^4.1.1", - "@types/rimraf": "^3.0.0", - "aegir": "^35.0.2", - "assert": "^2.0.0", - "blockstore-datastore-adapter": "^1.0.0", - "events": "^3.3.0", - "ipfs-utils": "^8.1.3", - "it-all": "^1.0.2", - "it-drain": "^1.0.1", - "it-first": "^1.0.2", - "just-range": "^2.1.0", - "rimraf": "^3.0.0", - "sinon": "^11.1.1", - "tsd": "^0.17.0", - "url": "^0.11.0", - "util": "^0.12.3" - }, - "dependencies": { - "@ipld/dag-pb": "^2.1.0", - "bytes": "^3.1.0", - "cborg": "^1.3.4", - "debug": "^4.1.0", - "err-code": "^3.0.1", - "eslint-plugin-ava": "^12.0.0", - "interface-blockstore": "^1.0.0", - "interface-datastore": "^5.0.0", - "ipfs-repo-migrations": "^10.0.0", - "it-filter": "^1.0.2", - "it-map": "^1.0.5", - "it-merge": "^1.0.2", - "it-parallel-batch": "^1.0.9", - "it-pipe": "^1.1.0", - "it-pushable": "^1.4.0", - "just-safe-get": "^2.0.0", - "just-safe-set": "^2.1.0", - "merge-options": "^3.0.4", - "mortice": "^2.0.1", - "multiformats": "^9.0.4", - "p-queue": "^6.0.0", - "proper-lockfile": "^4.0.0", - "sort-keys": "^4.0.0", - "uint8arrays": "^3.0.0" + "lerna": "^4.0.0", + "rimraf": "^3.0.2" }, - "eslintConfig": { - "extends": "ipfs", - "ignorePatterns": [ - "!.aegir.js" - ] + "repository": { + "type": "git", + "url": "git+https://github.com/ipfs/js-ipfs-repo.git" }, - "license": "MIT", + "private": true, + "license": "(Apache-2.0 OR MIT)", "contributors": [ "David Dias ", "achingbrain ", @@ -134,5 +58,12 @@ "Richard Littauer ", "Steef Min ", "Volker Mische " - ] + ], + "workspaces": [ + "packages/*" + ], + "engines": { + "node": ">=14.0.0", + "npm": ">=7.0.0" + } } diff --git a/packages/ipfs-repo-migrations/.aegir.js b/packages/ipfs-repo-migrations/.aegir.js new file mode 100644 index 00000000..e5e14f77 --- /dev/null +++ b/packages/ipfs-repo-migrations/.aegir.js @@ -0,0 +1,22 @@ +'use strict' + +const path = require('path') + +const esbuild = { + // this will inject all the named exports from 'node-globals.js' as globals + inject: [path.join(__dirname, 'scripts/node-globals.js')] +} + +/** @type {import('aegir').PartialOptions} */ +module.exports = { + test: { + browser: { + config: { + buildConfig: esbuild + } + } + }, + build: { + bundlesizeMax: '37kB' + } +} diff --git a/packages/ipfs-repo-migrations/CHANGELOG.md b/packages/ipfs-repo-migrations/CHANGELOG.md new file mode 100644 index 00000000..b3ca0b42 --- /dev/null +++ b/packages/ipfs-repo-migrations/CHANGELOG.md @@ -0,0 
+1,284 @@ +# [10.0.0](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v9.0.1...v10.0.0) (2021-08-23) + + +### Bug Fixes + +* use new s3 instance for every test ([#124](https://github.com/ipfs/js-ipfs-repo-migrations/issues/124)) ([d3c0056](https://github.com/ipfs/js-ipfs-repo-migrations/commit/d3c0056160d7592da5d89e57f9d9faefa1fadd7d)) + + +### Features + +* migrate mfs root to datastore ([#126](https://github.com/ipfs/js-ipfs-repo-migrations/issues/126)) ([540a077](https://github.com/ipfs/js-ipfs-repo-migrations/commit/540a077528037f00468c09fc15a9f190de283967)) + + +### BREAKING CHANGES + +* adds a new migration, should go out as a major + + + +## [9.0.1](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v9.0.0...v9.0.1) (2021-07-30) + + +### Bug Fixes + +* remove optional chaining ([#122](https://github.com/ipfs/js-ipfs-repo-migrations/issues/122)) ([242dc8e](https://github.com/ipfs/js-ipfs-repo-migrations/commit/242dc8efcb5e612f01d4d8a6788d9640f21e2325)) + + + +# [9.0.0](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v8.0.0...v9.0.0) (2021-07-07) + + +### chore + +* upgrade to new multiformats module ([#98](https://github.com/ipfs/js-ipfs-repo-migrations/issues/98)) ([dad30b6](https://github.com/ipfs/js-ipfs-repo-migrations/commit/dad30b6cd4f3067a1ed86b0971d84b26f42667ce)) + + +### BREAKING CHANGES + +* Uses new CID class + +Co-authored-by: Rod Vagg + + + +# [8.0.0](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v7.0.3...v8.0.0) (2021-04-15) + + + +## [7.0.3](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v7.0.2...v7.0.3) (2021-03-21) + + +### Bug Fixes + +* remove hacks from build and test ([#82](https://github.com/ipfs/js-ipfs-repo-migrations/issues/82)) ([18c0793](https://github.com/ipfs/js-ipfs-repo-migrations/commit/18c07937013dd95c53e38bb775aeb44b5eb75bcb)) + + + +## [7.0.2](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v7.0.1...v7.0.2) (2021-03-15) + + + +## [7.0.1](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v7.0.0...v7.0.1) (2021-03-04) + + +### Bug Fixes + +* add cborg to browser field ([#76](https://github.com/ipfs/js-ipfs-repo-migrations/issues/76)) ([8d6c65d](https://github.com/ipfs/js-ipfs-repo-migrations/commit/8d6c65d75dea367ffeb54d891106211abef68da3)) + + + +# [7.0.0](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v6.0.0...v7.0.0) (2021-03-04) + + +### Features + +* add types ([#66](https://github.com/ipfs/js-ipfs-repo-migrations/issues/66)) ([349f3c8](https://github.com/ipfs/js-ipfs-repo-migrations/commit/349f3c842019edfbaed70fa3642fb280359a5aab)) + + +### BREAKING CHANGES + +* where there were previously no types, now there are types + + + +# [6.0.0](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v5.0.6...v6.0.0) (2021-01-29) + + +### Features + +* migration 10 to allow upgrading level in the browser ([#59](https://github.com/ipfs/js-ipfs-repo-migrations/issues/59)) ([7dc562b](https://github.com/ipfs/js-ipfs-repo-migrations/commit/7dc562b05eeeaa8db2de5a95a4b3bcbab6f17d5c)), closes [Level/level-js#179](https://github.com/Level/level-js/issues/179) + + + +## 
[5.0.6](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v5.0.5...v5.0.6) (2021-01-27) + + + + +## [5.0.5](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v5.0.4...v5.0.5) (2020-08-17) + + +### Bug Fixes + +* migrate empty repos ([#35](https://github.com/ipfs/js-ipfs-repo-migrations/issues/35)) ([e48efad](https://github.com/ipfs/js-ipfs-repo-migrations/commit/e48efad)) + + + + +## [5.0.4](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v5.0.3...v5.0.4) (2020-08-15) + + +### Bug Fixes + +* only count migrated blocks ([d49f338](https://github.com/ipfs/js-ipfs-repo-migrations/commit/d49f338)) + + + + +## [5.0.3](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v5.0.2...v5.0.3) (2020-08-15) + + +### Bug Fixes + +* pass repo options when migration error occurs ([267e718](https://github.com/ipfs/js-ipfs-repo-migrations/commit/267e718)) + + + + +## [5.0.2](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v5.0.1...v5.0.2) (2020-08-15) + + +### Bug Fixes + +* null-guard progress and enable migration 9 ([#34](https://github.com/ipfs/js-ipfs-repo-migrations/issues/34)) ([a42e671](https://github.com/ipfs/js-ipfs-repo-migrations/commit/a42e671)) + + + + +## [5.0.1](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v5.0.0...v5.0.1) (2020-08-15) + + +### Bug Fixes + +* root is not a store inside the store ([e4c9a9f](https://github.com/ipfs/js-ipfs-repo-migrations/commit/e4c9a9f)) + + + + +# [5.0.0](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v4.0.0...v5.0.0) (2020-08-15) + + +### Features + +* report migration progress ([#33](https://github.com/ipfs/js-ipfs-repo-migrations/issues/33)) ([051c0a4](https://github.com/ipfs/js-ipfs-repo-migrations/commit/051c0a4)), closes [#32](https://github.com/ipfs/js-ipfs-repo-migrations/issues/32) + + +### BREAKING CHANGES + +* - The signature of the `onProgress` callback has changed + + + + +# [4.0.0](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v3.0.0...v4.0.0) (2020-08-06) + + +### Bug Fixes + +* require passing repo options to migrator ([#31](https://github.com/ipfs/js-ipfs-repo-migrations/issues/31)) ([725f821](https://github.com/ipfs/js-ipfs-repo-migrations/commit/725f821)) + + +### BREAKING CHANGES + +* - `migrator.migrate(path, version, opts)` has changed to `migrator.migrate(path, repoOpts, version, opts)` +- `migrator.revert(path, version, opts)` has changed to `migrator.revert(path, repoOpts, version, opts)` + + + + +# [3.0.0](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v2.0.1...v3.0.0) (2020-08-05) + + +### Bug Fixes + +* replace node buffers with uint8arrays ([#25](https://github.com/ipfs/js-ipfs-repo-migrations/issues/25)) ([1e7592d](https://github.com/ipfs/js-ipfs-repo-migrations/commit/1e7592d)) + + +### BREAKING CHANGES + +* - node `Buffer`s have been replaced with `Uint8Array`s + + + + +## [2.0.1](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v2.0.0...v2.0.1) (2020-07-21) + + +### Bug Fixes + +* make recursive pins depth infinity ([ef95579](https://github.com/ipfs/js-ipfs-repo-migrations/commit/ef95579)) + + + + +# 
[2.0.0](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v1.0.0...v2.0.0) (2020-07-21) + + +### Features + +* add migration 9 to migrate pins to the datastore and back ([#15](https://github.com/ipfs/js-ipfs-repo-migrations/issues/15)) ([2b14578](https://github.com/ipfs/js-ipfs-repo-migrations/commit/2b14578)) + + + + +# [1.0.0](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v0.2.2...v1.0.0) (2020-06-25) + + + + +## [0.2.2](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v0.2.1...v0.2.2) (2020-06-23) + + +### Bug Fixes + +* **ci:** add empty commit to fix lint checks on master ([cf10410](https://github.com/ipfs/js-ipfs-repo-migrations/commit/cf10410)) + + + + +## [0.2.1](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v0.2.0...v0.2.1) (2020-04-28) + + +### Bug Fixes + +* linter ([2a20542](https://github.com/ipfs/js-ipfs-repo-migrations/commit/2a20542)) + + + + +# [0.2.0](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v0.1.1+migr-7...v0.2.0) (2020-04-14) + + +### Bug Fixes + +* remove datastore-level from the readme and deps ([dabab4e](https://github.com/ipfs/js-ipfs-repo-migrations/commit/dabab4e)) +* remove node globals ([8b51f8d](https://github.com/ipfs/js-ipfs-repo-migrations/commit/8b51f8d)) +* update datastore-idb ([3dc05ef](https://github.com/ipfs/js-ipfs-repo-migrations/commit/3dc05ef)) + + + + +## [0.1.1](https://github.com/ipfs/js-ipfs-repo-migrations/compare/v0.1.0+migr-7...v0.1.1) (2019-11-09) + + +### Bug Fixes + +* provide empty migrations for all previous versions ([6ecba01](https://github.com/ipfs/js-ipfs-repo-migrations/commit/6ecba01)) +* validate presence for all migrations in a range ([076f300](https://github.com/ipfs/js-ipfs-repo-migrations/commit/076f300)) + + + + +# 0.1.0 (2019-11-06) + + +### Bug Fixes + +* async lock-mem ([bc88612](https://github.com/ipfs/js-ipfs-repo-migrations/commit/bc88612)) +* details ([93e0c86](https://github.com/ipfs/js-ipfs-repo-migrations/commit/93e0c86)) +* removing staleness in lock ([e92d057](https://github.com/ipfs/js-ipfs-repo-migrations/commit/e92d057)) +* update src/utils.js ([662d89b](https://github.com/ipfs/js-ipfs-repo-migrations/commit/662d89b)) + + +### Features + +* confirm migration of irreversible migrations ([9477d57](https://github.com/ipfs/js-ipfs-repo-migrations/commit/9477d57)) +* empty 7-th migration ([dc886f0](https://github.com/ipfs/js-ipfs-repo-migrations/commit/dc886f0)) +* initial implementation ([#1](https://github.com/ipfs/js-ipfs-repo-migrations/issues/1)) ([aae9aec](https://github.com/ipfs/js-ipfs-repo-migrations/commit/aae9aec)) +* release metadata mentioned in README ([cf2409a](https://github.com/ipfs/js-ipfs-repo-migrations/commit/cf2409a)) +* require toVersion in migrate() ([1596dfe](https://github.com/ipfs/js-ipfs-repo-migrations/commit/1596dfe)) +* using browser field for browsers environment ([1474d5e](https://github.com/ipfs/js-ipfs-repo-migrations/commit/1474d5e)) + + + diff --git a/packages/ipfs-repo-migrations/LICENSE-APACHE b/packages/ipfs-repo-migrations/LICENSE-APACHE new file mode 100644 index 00000000..14478a3b --- /dev/null +++ 
b/packages/ipfs-repo-migrations/LICENSE-APACHE @@ -0,0 +1,5 @@ +Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. diff --git a/packages/ipfs-repo-migrations/LICENSE-MIT b/packages/ipfs-repo-migrations/LICENSE-MIT new file mode 100644 index 00000000..749aa1ec --- /dev/null +++ b/packages/ipfs-repo-migrations/LICENSE-MIT @@ -0,0 +1,19 @@ +The MIT License (MIT) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
\ No newline at end of file diff --git a/packages/ipfs-repo-migrations/README.md b/packages/ipfs-repo-migrations/README.md new file mode 100644 index 00000000..83eb3685 --- /dev/null +++ b/packages/ipfs-repo-migrations/README.md @@ -0,0 +1,315 @@ +# Migration tool for JS IPFS Repo + +[![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs-repo-migrations)](https://travis-ci.com/ipfs/js-ipfs-repo-migrations) +[![codecov](https://codecov.io/gh/ipfs/js-ipfs-repo-migrations/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-repo-migrations) +[![Dependency Status](https://david-dm.org/ipfs/js-ipfs-repo-migrations.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-repo-migrations) +[![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://ipn.io) +[![](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io/) +[![](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23ipfs) +[![standard-readme compliant](https://img.shields.io/badge/standard--readme-OK-green.svg?style=flat-square)](https://github.com/RichardLitt/standard-readme) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard) +![](https://img.shields.io/badge/npm-%3E%3D6.0.0-orange.svg?style=flat-square) +![](https://img.shields.io/badge/Node.js-%3E%3D10.0.0-orange.svg?style=flat-square) + +> Migration framework for versioning of JS IPFS Repo + +This package is inspired by the [go-ipfs repo migration tool](https://github.com/ipfs/fs-repo-migrations/) + +## Lead Maintainer + +[Alex Potsides](http://github.com/achingbrain) + +## Table of Contents + +- [Background](#background) +- [Install](#install) + - [npm](#npm) + - [Use in Node.js](#use-in-nodejs) + - [Use in a browser with browserify, webpack or any other bundler](#use-in-a-browser-with-browserify-webpack-or-any-other-bundler) +- [Usage](#usage) +- [API](#api) + - [`.migrate(path, repoOptions, toVersion, {ignoreLock, onProgress, isDryRun}) -> Promise<void>`](#migratepath-repooptions-toversion-ignorelock-onprogress-isdryrun---promisevoid) + - [`onProgress(version, percent, message)`](#onprogressversion-percent-message) + - [`.revert(path, repoOptions, toVersion, {ignoreLock, onProgress, isDryRun}) -> Promise<void>`](#revertpath-repooptions-toversion-ignorelock-onprogress-isdryrun---promisevoid) + - [`getLatestMigrationVersion() -> int`](#getlatestmigrationversion---int) +- [Creating a new migration](#creating-a-new-migration) + - [Architecture of a migration](#architecture-of-a-migration) + - [`.migrate(repoPath, repoOptions)`](#migraterepopath-repooptions) + - [`.revert(repoPath, repoOptions)`](#revertrepopath-repooptions) + - [Browser vs. NodeJS environments](#browser-vs-nodejs-environments) + - [Guidelines](#guidelines) + - [Integration with js-ipfs](#integration-with-js-ipfs) + - [Tests](#tests) + - [Empty migrations](#empty-migrations) + - [Migrations matrix](#migrations-matrix) + - [Migrations](#migrations) + - [7](#7) + - [8](#8) + - [9](#9) + - [10](#10) +- [Developer](#developer) + - [Module versioning notes](#module-versioning-notes) +- [Contribute](#contribute) +- [License](#license) + +## Background + +As js-ipfs evolves and new technologies, algorithms and data structures are incorporated, it is necessary to +enable users to transition between versions. 
Different versions of js-ipfs may expect a different IPFS repo structure or content (see: [IPFS repo spec](https://github.com/ipfs/specs/blob/master/REPO.md), [JS implementation](https://github.com/ipfs/js-ipfs-repo)). +So the IPFS repo is versioned, and this package provides a framework to create migrations to transition +from one version of IPFS repo to the next/previous version. + +This framework: + * Handles locking/unlocking of the repository + * Defines the migrations API + * Executes and reports migrations in both directions: forward and backward + * Simplifies creation of new migrations + * Works in the browser too! + +## Install + +### npm + +```sh +> npm install ipfs-repo-migrations +``` + +### Use in Node.js + +```js +const migrations = require('ipfs-repo-migrations') +``` + +### Use in a browser with browserify, webpack or any other bundler + +```js +const migrations = require('ipfs-repo-migrations') +``` + +## Usage + +Example: + +```js +const migrations = require('ipfs-repo-migrations') + +const repoPath = 'some/repo/path' +const currentRepoVersion = 7 +const latestVersion = migrations.getLatestMigrationVersion() +const repoOptions = { + ... // the same storage backend/storage options passed to `ipfs-repo` +} + +if (currentRepoVersion < latestVersion) { + // Old repo! Let's migrate to the latest version! + await migrations.migrate(repoPath, repoOptions, latestVersion) +} +``` + +To migrate your repository using the CLI, see the [how to run migrations](./run.md) tutorial. + +## API + +### `.migrate(path, repoOptions, toVersion, {ignoreLock, onProgress, isDryRun}) -> Promise<void>` + +Executes a forward migration to a specific version. + +**Arguments:** + + * `path` (string, mandatory) - path to the repo to be migrated + * `repoOptions` (object, mandatory) - options that are passed to the migrations, which use them to construct the datastore (the options are the same as for IPFSRepo). + * `toVersion` (int, mandatory) - version to which the repo should be migrated. + * `options` (object, optional) - options for the migration + * `options.ignoreLock` (bool, optional) - if true will not lock the repo when applying migrations. Use with caution. + * `options.onProgress` (function, optional) - callback that is called during each migration to report progress. + * `options.isDryRun` (bool, optional) - flag that indicates if it is a dry run that should give the same output as running a migration but without making any actual changes. + +#### `onProgress(version, percent, message)` + +Signature of the progress callback. + +**Arguments:** + * `version` (int) - version of the migration that is currently being run. + * `percent` (int) - how much of the current migration has completed, as a percentage. + * `message` (string) - description of the migration step that is currently executing. + +### `.revert(path, repoOptions, toVersion, {ignoreLock, onProgress, isDryRun}) -> Promise<void>` + +Executes a backward migration to a specific version. + +**Arguments:** + + * `path` (string, mandatory) - path to the repo to be reverted + * `repoOptions` (object, mandatory) - options that are passed to the migrations, which use them to construct the datastore (the options are the same as for IPFSRepo). + * `toVersion` (int, mandatory) - version to which the repo should be reverted. + * `options` (object, optional) - options for the reversion + * `options.ignoreLock` (bool, optional) - if true will not lock the repo when applying migrations. Use with caution. + * `options.onProgress` (function, optional) - callback that is called during each migration to report progress. + * `options.isDryRun` (bool, optional) - flag that indicates if it is a dry run that should give the same output as running a migration but without making any actual changes. 
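+
+As an illustrative sketch (the target version here is arbitrary and `repoOptions` is the same object as in the usage example above), a revert with progress reporting could look like:
+
+```js
+const migrations = require('ipfs-repo-migrations')
+
+// revert the repo at `repoPath` back to version 9, logging progress
+await migrations.revert(repoPath, repoOptions, 9, {
+  onProgress: (version, percent, message) => {
+    console.log(`migration ${version}: ${percent}% - ${message}`)
+  }
+})
+```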
+ +### `getLatestMigrationVersion() -> int` + +Returns the version of the latest migration. + +## Creating a new migration + +Migrations are one of those things that can be extremely painful for users. At the end of the day, we want users never to have to think about them. The process should be: + +- SAFE. No data lost. Ever. +- Revertible. Tools must implement forward and backward (if possible) migrations. +- Tests. Migrations have to be well tested. +- To Spec. The tools must conform to the spec. + +If your migration has several parts, it should be fail-proof enough that if one part of the migration fails, the previous changes +are reverted before the error is propagated. If possible, the outcome should be a consistent repo, so that the migration can +be run again. + +### Architecture of a migration + +All migrations are placed in the `/migrations` folder. Each folder there represents one migration that follows the migration +API. + +All migrations are collected in `/migrations/index.js`, which should not be edited manually. + +**The order of migrations is important and migrations must be sorted in ascending order**. + +Each migration must follow this API. It must export an object in its `index.js` that has the following properties: + + * `version` (int) - Number that represents the version which the repo will migrate to (eg. `8` will move the repo to version 8). + * `description` (string) - Brief description of what the migration does. + * `migrate` (function) - Function that performs the migration (see signature of this function below) + * `revert` (function) - If defined then this function will revert the migration to the previous version. Otherwise it is assumed that it is not possible to revert this migration. + +#### `.migrate(repoPath, repoOptions)` + +_Do not confuse this function with the `require('ipfs-repo-migrations').migrate()` function that drives the whole migration process!_ + +Arguments: + * `repoPath` (string) - absolute path to the root of the repo + * `repoOptions` (object, optional) - object containing `IPFSRepo` options, that should be used to construct a datastore instance. + +#### `.revert(repoPath, repoOptions)` + +_Do not confuse this function with the `require('ipfs-repo-migrations').revert()` function that drives the whole backward migration process!_ + +Arguments: + * `repoPath` (string) - path to the root of the repo + * `repoOptions` (object, optional) - object containing `IPFSRepo` options, that should be used to construct the datastore instance. + +### Browser vs. NodeJS environments + +The migration might need to perform specific tasks in the browser or NodeJS environment. In such a case, create a +migration file `/migrations/migration-<version>/index_browser.js`, which has to follow the same API as described before. +Then add an entry to the `browser` field of `package.json` as follows: + +``` +'./migrations/migration-<version>/index.js': './migrations/migration-<version>/index_browser.js' +``` + +In browser environments, `index.js` will then be replaced with `index_browser.js`. 
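+
+Whichever file is loaded, it must export the migration object described above. A minimal sketch of such a module (the version number and messages here are made up; the `backends`/`onProgress` parameter shape follows the migration implementations added later in this diff):
+
+```js
+'use strict'
+
+/** @type {import('../../src/types').Migration} */
+module.exports = {
+  version: 12, // hypothetical next version
+  description: 'An example migration that changes nothing',
+  migrate: async (backends, onProgress = () => {}) => {
+    onProgress(100, 'Nothing to migrate')
+  },
+  revert: async (backends, onProgress = () => {}) => {
+    onProgress(100, 'Nothing to revert')
+  }
+}
+```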
+ +Simple migrations should not need to distinguish between +these environments, as the datastore implementation will handle the main differences. + +There are currently two main datastore implementations: + 1. [`datastore-fs`](https://github.com/ipfs/js-datastore-fs) that is backed by the file system and is used mainly in the NodeJS environment + 2. [`datastore-idb`](https://github.com/ipfs/js-datastore-idb) that is backed by IndexedDB and is used mainly in the browser environment + + Both implementations share the same API and hence are interchangeable. + + When the migration is run in a browser environment, `datastore-fs` is automatically replaced with `datastore-idb` even + when it is directly imported (`require('datastore-fs')` will return `datastore-idb` in a browser). + So with simple migrations you shouldn't worry about the difference between `datastore-fs` and `datastore-idb` + and by default use the `datastore-fs` package (as the replace mechanism does not work vice versa). + +### Guidelines + +The recommended way to write a new migration is to first bootstrap a dummy migration using the CLI: + +```sh +> npm run new-migration +``` + +A new folder is created with the bootstrapped migration. You can then simply fill in the required fields and +write the rest of the migration! + +### Integration with js-ipfs + +When a new migration is created, a new version of this package has to be released. Afterwards, the version of this package in [`js-ipfs-repo`](https://github.com/ipfs/js-ipfs-repo) has to be updated +together with the repo version that `IPFSRepo` expects. Then the updated version of `js-ipfs-repo` should be propagated to `js-ipfs`. + +### Tests + +If a migration affects any of the following functionality, it must provide tests for the following functions + to work under the version of the repo that it migrates to: + +* `/src/repo/version.js`:`getVersion()` - retrieving the repository's version +* `/src/repo/lock.js`:`lock()` - locking a repository that uses the file system +* `/src/repo/lock-memory.js`:`lock()` - locking a repository that uses memory + +Every migration must have test coverage. Tests for migrations should be placed in the `/test/migrations/` folder. Most probably +you will have to plug the tests into `browser.js`/`node.js` if they require specific bootstrapping on each platform. + +### Empty migrations + +For interop with go-ipfs it might be necessary just to bump the version of a repo without any actual +modification, as there might not be any changes needed in the JS implementation. For that purpose you can create an "empty migration". + +The easiest way to do so is with the CLI: + +```sh +> npm run new-migration -- --empty +``` + +This will create an empty migration with the next version. + +### Migrations matrix + +| IPFS repo version | JS IPFS version | +| -----------------: |:----------------:| +| 7 | v0.0.0 | +| 8 | v0.48.0 | +| 9 | v0.49.0 | + +### Migrations + +#### 7 + +This is the initial version of the datastore, inherited from go-IPFS in an attempt to maintain cross-compatibility between the two implementations. + +#### 8 + +Blockstore keys are transformed into base32 representations of the multihash from the CID of the block. + +#### 9 + +Pins were migrated from a DAG to a Datastore - see [ipfs/js-ipfs#2771](https://github.com/ipfs/js-ipfs/pull/2771) + +#### 10 + +`level@6.x.x` upgrades the `level-js` dependency from `4.x.x` to `5.x.x`. This update requires a database migration to convert all string keys/values into buffers. This only runs in the browser; Node.js is unaffected. See [Level/level-js#179](https://github.com/Level/level-js/pull/179) 
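+
+The conversion itself is implemented by `migration-10` later in this diff; the essence of its upgrade step is sketched below using the same `uint8arrays` helper - each string key is deleted and its value is re-put under the UTF-8 bytes of that key:
+
+```js
+const { fromString } = require('uint8arrays/from-string')
+
+// produce the operations that replace one string key with its binary form,
+// leaving the stored value untouched
+const upgrade = (key, value) => ([
+  { type: 'del', key },
+  { type: 'put', key: fromString(key), value }
+])
+```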
+ +## Developer + +### Module versioning notes + +In order to have a good overview of which version of the package contains which migrations, metadata in the format `migr-<version>` +should be appended to every release. If `aegir` is used for releasing, +you can use the `release --metadata` option. + +## Contribute + +There are some ways you can make this module better: + +- Consult our [open issues](https://github.com/ipfs/js-ipfs-repo/issues) and take on one of them +- Help our tests reach 100% coverage! + +This repository falls under the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md). + +[![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/CONTRIBUTING.md) + +## License + +[Apache-2.0](LICENSE-APACHE) OR [MIT](LICENSE-MIT) diff --git a/packages/ipfs-repo-migrations/maintainer.json b/packages/ipfs-repo-migrations/maintainer.json new file mode 100644 index 00000000..17382c24 --- /dev/null +++ b/packages/ipfs-repo-migrations/maintainer.json @@ -0,0 +1,11 @@ +{ + "repoLeadMaintainer": { + "name": "Adam Uhlíř", + "email": "adam@uhlir.dev", + "username": "auhau" + }, + "workingGroup": { + "name": "JS IPFS", + "entryPoint": "https://github.com/ipfs/js-core" + } +} diff --git a/packages/ipfs-repo-migrations/migrations/index.js b/packages/ipfs-repo-migrations/migrations/index.js new file mode 100644 index 00000000..b1070fa5 --- /dev/null +++ b/packages/ipfs-repo-migrations/migrations/index.js @@ -0,0 +1,27 @@ +'use strict' + +/** + * @type {import('../src/types').Migration} + */ +const emptyMigration = { + description: 'Empty migration.', + // @ts-ignore + migrate: () => {}, + // @ts-ignore + revert: () => {}, + empty: true +} + +module.exports = [ + Object.assign({ version: 1 }, emptyMigration), + Object.assign({ version: 2 }, emptyMigration), + Object.assign({ version: 3 }, emptyMigration), + Object.assign({ version: 4 }, emptyMigration), + Object.assign({ version: 5 }, emptyMigration), + Object.assign({ version: 6 }, emptyMigration), + Object.assign({ version: 7 }, emptyMigration), + require('./migration-8'), + require('./migration-9'), + require('./migration-10'), + require('./migration-11') +] diff --git a/packages/ipfs-repo-migrations/migrations/migration-10/index.js b/packages/ipfs-repo-migrations/migrations/migration-10/index.js new file mode 100644 index 00000000..e9de5a56 --- /dev/null +++ b/packages/ipfs-repo-migrations/migrations/migration-10/index.js @@ -0,0 +1,233 @@ +'use strict' + +const { + findLevelJs +} = require('../../src/utils') +const { fromString } = require('uint8arrays/from-string') +const { toString } = require('uint8arrays/to-string') + +/** + * @typedef {import('../../src/types').Migration} Migration + * @typedef {import('interface-datastore').Datastore} Datastore + * @typedef {import('interface-blockstore').Blockstore} Blockstore + * @typedef {import('../../src/types').MigrationProgressCallback} MigrationProgressCallback + * + * @typedef {{ type: 'del', key: string | Uint8Array } | { type: 'put', key: string | Uint8Array, value: Uint8Array }} Operation + * @typedef {function (string, Uint8Array): Operation[]} UpgradeFunction + * @typedef {function (Uint8Array, 
Uint8Array): Operation[]} DowngradeFunction + */ + +/** + * @param {string} name + * @param {Datastore} store + * @param {(message: string) => void} onProgress + */ +async function keysToBinary (name, store, onProgress = () => {}) { + const db = findLevelJs(store) + + // only interested in level-js + if (!db) { + onProgress(`${name} did not need an upgrade`) + + return + } + + onProgress(`Upgrading ${name}`) + + /** + * @type {UpgradeFunction} + */ + const upgrade = (key, value) => { + return [ + { type: 'del', key: key }, + { type: 'put', key: fromString(key), value: value } + ] + } + + await withEach(db, upgrade) +} + +/** + * @param {string} name + * @param {Datastore} store + * @param {(message: string) => void} onProgress + */ +async function keysToStrings (name, store, onProgress = () => {}) { + const db = findLevelJs(store) + + // only interested in level-js + if (!db) { + onProgress(`${name} did not need a downgrade`) + + return + } + + onProgress(`Downgrading ${name}`) + + /** + * @type {DowngradeFunction} + */ + const downgrade = (key, value) => { + return [ + { type: 'del', key: key }, + { type: 'put', key: toString(key), value: value } + ] + } + + await withEach(db, downgrade) +} + +/** + * @param {any} store + * @returns {Datastore} + */ +function unwrap (store) { + if (store.child) { + return unwrap(store.child) + } + + return store +} + +/** + * @param {import('../../src/types').Backends} backends + * @param {MigrationProgressCallback} onProgress + * @param {*} fn + */ +async function process (backends, onProgress, fn) { + /** + * @type {{ name: string, store: Datastore }[]} + */ + const datastores = Object.entries(backends) + .map(([key, backend]) => ({ key, backend: unwrap(backend) })) + .filter(({ key, backend }) => backend.constructor.name === 'LevelDatastore') + .map(({ key, backend }) => ({ + name: key, + store: backend + })) + + onProgress(0, `Migrating ${datastores.length} dbs`) + let migrated = 0 + + /** + * @param {string} message + */ + const progress = (message) => { + onProgress(Math.round((migrated / datastores.length) * 100), message) + } + + for (const { name, store } of datastores) { + await store.open() + + try { + await fn(name, store, progress) + } finally { + migrated++ + await store.close() + } + } + + onProgress(100, `Migrated ${datastores.length} dbs`) +} + +/** @type {Migration} */ +module.exports = { + version: 10, + description: 'Migrates datastore-level keys to binary', + migrate: (backends, onProgress = () => {}) => { + return process(backends, onProgress, keysToBinary) + }, + revert: (backends, onProgress = () => {}) => { + return process(backends, onProgress, keysToStrings) + } +} + +/** + * Uses the upgrade strategy from level-js@5.x.x - note we can't call the `.upgrade` command + * directly because it will be removed in level-js@6.x.x and we can't guarantee users will + * have migrated by then - e.g. they may jump from level-js@4.x.x straight to level-js@6.x.x + * so we have to duplicate the code here. 
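+ *
+ * The approach: iterate over every record with a raw iterator (key/value deserialization
+ * disabled), apply the passed function to each key/value pair to produce del/put operations,
+ * and execute those operations against the underlying IndexedDB object store in a
+ * read-write transaction per record.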
+ * + * @param {any} db + * @param {UpgradeFunction | DowngradeFunction} fn + * @returns {Promise} + */ +function withEach (db, fn) { + /** + * @param {Operation[]} operations + * @param {(error?: Error) => void} next + */ + function batch (operations, next) { + const store = db.store('readwrite') + const transaction = store.transaction + let index = 0 + /** @type {Error | undefined} */ + let error + + transaction.onabort = () => next(error || transaction.error || new Error('aborted by user')) + transaction.oncomplete = () => next() + + function loop () { + const op = operations[index++] + const key = op.key + let req + + try { + req = op.type === 'del' ? store.delete(key) : store.put(op.value, key) + } catch (err) { + error = err + transaction.abort() + return + } + + if (index < operations.length) { + req.onsuccess = loop + } + } + + loop() + } + + return new Promise((resolve, reject) => { + const it = db.iterator() + // raw keys and values only + /** + * @template T + * @param {T} data + */ + const id = (data) => data + it._deserializeKey = it._deserializeValue = id + next() + + function next () { + /** + * @param {Error | undefined} err + * @param {string | undefined} key + * @param {Uint8Array} value + */ + const handleNext = (err, key, value) => { + if (err || key === undefined) { + /** + * @param {Error | undefined} err2 + */ + const handleEnd = (err2) => { + if (err2) { + reject(err2) + return + } + + resolve() + } + + it.end(handleEnd) + + return + } + + // @ts-ignore + batch(fn(key, value), next) + } + it.next(handleNext) + } + }) +} diff --git a/packages/ipfs-repo-migrations/migrations/migration-11/index.js b/packages/ipfs-repo-migrations/migrations/migration-11/index.js new file mode 100644 index 00000000..8bf7152d --- /dev/null +++ b/packages/ipfs-repo-migrations/migrations/migration-11/index.js @@ -0,0 +1,53 @@ +'use strict' + +const { Key } = require('interface-datastore') + +const MFS_ROOT_KEY = new Key('/local/filesroot') + +/** + * @param {import('../../src/types').Backends} backends + * @param {import('../../src/types').MigrationProgressCallback} onProgress + */ +async function storeMfsRootInDatastore (backends, onProgress = () => {}) { + onProgress(100, 'Migrating MFS root to repo datastore') + + await backends.root.open() + await backends.datastore.open() + + const root = await backends.root.get(MFS_ROOT_KEY) + await backends.datastore.put(MFS_ROOT_KEY, root) + await backends.root.delete(MFS_ROOT_KEY) + + await backends.datastore.close() + await backends.root.close() + + onProgress(100, 'Stored MFS root in repo datastore') +} + +/** + * @param {import('../../src/types').Backends} backends + * @param {import('../../src/types').MigrationProgressCallback} onProgress + */ +async function storeMfsRootInRoot (backends, onProgress = () => {}) { + onProgress(100, 'Migrating MFS root to repo root datastore') + + await backends.root.open() + await backends.datastore.open() + + const root = await backends.datastore.get(MFS_ROOT_KEY) + await backends.root.put(MFS_ROOT_KEY, root) + await backends.datastore.delete(MFS_ROOT_KEY) + + await backends.datastore.close() + await backends.root.close() + + onProgress(100, 'Stored MFS root in repo root datastore') +} + +/** @type {import('../../src/types').Migration} */ +module.exports = { + version: 11, + description: 'Store mfs root in the datastore', + migrate: storeMfsRootInDatastore, + revert: storeMfsRootInRoot +} diff --git a/packages/ipfs-repo-migrations/migrations/migration-8/index.js 
b/packages/ipfs-repo-migrations/migrations/migration-8/index.js new file mode 100644 index 00000000..1bfa1c50 --- /dev/null +++ b/packages/ipfs-repo-migrations/migrations/migration-8/index.js @@ -0,0 +1,117 @@ +'use strict' + +const { CID } = require('multiformats/cid') +const Key = require('interface-datastore').Key +const log = require('debug')('ipfs:repo:migrator:migration-8') + +const length = require('it-length') +const { base32 } = require('multiformats/bases/base32') +const raw = require('multiformats/codecs/raw') +const mhd = require('multiformats/hashes/digest') + +/** + * @typedef {import('../../src/types').Migration} Migration + * @typedef {import('interface-datastore').Datastore} Datastore + */ + +/** + * @param {*} blockstore + * @returns {Datastore} + */ +function unwrap (blockstore) { + if (blockstore.child) { + return unwrap(blockstore.child) + } + + return blockstore +} + +/** + * @param {Key} key + */ +function keyToMultihash (key) { + try { + const buf = base32.decode(`b${key.toString().toLowerCase().slice(1)}`) + + // Extract multihash from CID + const multihash = CID.decode(buf).multihash.bytes + + // Encode and slice off multibase codec + // Should be uppercase for interop with go + const multihashStr = base32.encode(multihash).slice(1).toUpperCase() + + return new Key(`/${multihashStr}`, false) + } catch (err) { + return key + } +} + +/** + * @param {Key} key + */ +function keyToCid (key) { + try { + const buf = base32.decode(`b${key.toString().toLowerCase().slice(1)}`) + const digest = mhd.decode(buf) + + // CID to Key + const multihash = base32.encode(CID.createV1(raw.code, digest).bytes).slice(1) + + return new Key(`/${multihash.toUpperCase()}`, false) + } catch { + return key + } +} + +/** + * @param {import('../../src/types').Backends} backends + * @param {(percent: number, message: string) => void} onProgress + * @param {(key: Key) => Key} keyFunction + */ +async function process (backends, onProgress, keyFunction) { + const blockstore = backends.blocks + await blockstore.open() + + const unwrapped = unwrap(blockstore) + + const blockCount = await length(unwrapped.queryKeys({ + filters: [(key) => { + const newKey = keyFunction(key) + + return newKey.toString() !== key.toString() + }] + })) + + try { + let counter = 0 + + for await (const block of unwrapped.query({})) { + const newKey = keyFunction(block.key) + + // If the Key is base32 CIDv0 then there's nothing to do + if (newKey.toString() !== block.key.toString()) { + counter += 1 + log(`Migrating Block from ${block.key} to ${newKey}`, await unwrapped.has(block.key)) + + await unwrapped.delete(block.key) + await unwrapped.put(newKey, block.value) + + onProgress((counter / blockCount) * 100, `Migrated Block from ${block.key} to ${newKey}`) + } + } + } finally { + await blockstore.close() + } +} + +/** @type {Migration} */ +module.exports = { + version: 8, + description: 'Transforms key names into base32 encoding and converts Block store to use bare multihashes encoded as base32', + migrate: (backends, onProgress = () => {}) => { + return process(backends, onProgress, keyToMultihash) + }, + revert: (backends, onProgress = () => {}) => { + return process(backends, onProgress, keyToCid) + } +} diff --git a/packages/ipfs-repo-migrations/migrations/migration-9/index.js b/packages/ipfs-repo-migrations/migrations/migration-9/index.js new file mode 100644 index 00000000..0551dd2d --- /dev/null +++ b/packages/ipfs-repo-migrations/migrations/migration-9/index.js @@ -0,0 +1,165 @@ +'use strict' + +const { CID } = 
require('multiformats/cid') +const dagPb = require('@ipld/dag-pb') +const cbor = require('cborg') +const pinset = require('./pin-set') +const { cidToKey, PIN_DS_KEY, PinTypes } = require('./utils') +const length = require('it-length') +const { sha256 } = require('multiformats/hashes/sha2') +const mhd = require('multiformats/hashes/digest') +const { base32 } = require('multiformats/bases/base32') + +/** + * @typedef {import('../../src/types').Migration} Migration + * @typedef {import('../../src/types').MigrationProgressCallback} MigrationProgressCallback + * @typedef {import('interface-datastore').Datastore} Datastore + * @typedef {import('interface-blockstore').Blockstore} Blockstore + * @typedef {import('multiformats/cid').CIDVersion} CIDVersion + */ + +/** + * @param {Blockstore} blockstore + * @param {Datastore} datastore + * @param {Datastore} pinstore + * @param {MigrationProgressCallback} onProgress + */ +async function pinsToDatastore (blockstore, datastore, pinstore, onProgress) { + if (!await datastore.has(PIN_DS_KEY)) { + return + } + + const mh = await datastore.get(PIN_DS_KEY) + const cid = CID.decode(mh) + const pinRootBuf = await blockstore.get(cid) + const pinRoot = dagPb.decode(pinRootBuf) + let counter = 0 + const pinCount = (await length(pinset.loadSet(blockstore, pinRoot, PinTypes.recursive))) + (await length(pinset.loadSet(blockstore, pinRoot, PinTypes.direct))) + + for await (const cid of pinset.loadSet(blockstore, pinRoot, PinTypes.recursive)) { + counter++ + + /** @type {{ depth: number, version?: CIDVersion, codec?: number }} */ + const pin = { + depth: Infinity + } + + if (cid.version !== 0) { + pin.version = cid.version + } + + if (cid.code !== dagPb.code) { + pin.codec = cid.code + } + + await pinstore.put(cidToKey(cid), cbor.encode(pin)) + + onProgress((counter / pinCount) * 100, `Migrated recursive pin ${cid}`) + } + + for await (const cid of pinset.loadSet(blockstore, pinRoot, PinTypes.direct)) { + counter++ + + /** @type {{ depth: number, version?: CIDVersion, codec?: number }} */ + const pin = { + depth: 0 + } + + if (cid.version !== 0) { + pin.version = cid.version + } + + if (cid.code !== dagPb.code) { + pin.codec = cid.code + } + + await pinstore.put(cidToKey(cid), cbor.encode(pin)) + + onProgress((counter / pinCount) * 100, `Migrated direct pin ${cid}`) + } + + await blockstore.delete(cid) + await datastore.delete(PIN_DS_KEY) +} + +/** + * @param {Blockstore} blockstore + * @param {Datastore} datastore + * @param {Datastore} pinstore + * @param {MigrationProgressCallback} onProgress + */ +async function pinsToDAG (blockstore, datastore, pinstore, onProgress) { + const recursivePins = [] + const directPins = [] + let counter = 0 + const pinCount = await length(pinstore.queryKeys({})) + + for await (const { key, value } of pinstore.query({})) { + counter++ + const pin = cbor.decode(value) + const cid = CID.create( + pin.version || 0, + pin.codec || dagPb.code, + mhd.decode(base32.decode('b' + key.toString().toLowerCase().split('/').pop())) + ) + + if (pin.depth === 0) { + onProgress((counter / pinCount) * 100, `Reverted direct pin ${cid}`) + + directPins.push(cid) + } else { + onProgress((counter / pinCount) * 100, `Reverted recursive pin ${cid}`) + + recursivePins.push(cid) + } + } + + onProgress(100, 'Updating pin root') + const pinRoot = { + Links: [ + await pinset.storeSet(blockstore, PinTypes.direct, directPins), + await pinset.storeSet(blockstore, PinTypes.recursive, recursivePins) + ] + } + const buf = dagPb.encode(pinRoot) + const digest = await 
sha256.digest(buf) + const cid = CID.createV0(digest) + + await blockstore.put(cid, buf) + await datastore.put(PIN_DS_KEY, cid.bytes) +} + +/** + * @param {import('../../src/types').Backends} backends + * @param {MigrationProgressCallback} onProgress + * @param {*} fn + */ +async function process (backends, onProgress, fn) { + const blockstore = backends.blocks + const datastore = backends.datastore + const pinstore = backends.pins + + await blockstore.open() + await datastore.open() + await pinstore.open() + + try { + await fn(blockstore, datastore, pinstore, onProgress) + } finally { + await pinstore.close() + await datastore.close() + await blockstore.close() + } +} + +/** @type {Migration} */ +module.exports = { + version: 9, + description: 'Migrates pins to datastore', + migrate: (backends, onProgress = () => {}) => { + return process(backends, onProgress, pinsToDatastore) + }, + revert: (backends, onProgress = () => {}) => { + return process(backends, onProgress, pinsToDAG) + } +} diff --git a/packages/ipfs-repo-migrations/migrations/migration-9/pin-set.js b/packages/ipfs-repo-migrations/migrations/migration-9/pin-set.js new file mode 100644 index 00000000..d3e47449 --- /dev/null +++ b/packages/ipfs-repo-migrations/migrations/migration-9/pin-set.js @@ -0,0 +1,280 @@ +'use strict' + +const { CID } = require('multiformats/cid') +const { + ipfs: { + pin: { + Set: PinSet + } + } +} = require('./pin') + +// @ts-ignore +const fnv1a = require('fnv1a') +const varint = require('varint') +const dagPb = require('@ipld/dag-pb') +const { DEFAULT_FANOUT, MAX_ITEMS, EMPTY_KEY } = require('./utils') +const { concat: uint8ArrayConcat } = require('uint8arrays/concat') +const { compare: uint8ArrayCompare } = require('uint8arrays/compare') +const { toString: uint8ArrayToString } = require('uint8arrays/to-string') +const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string') +const { sha256 } = require('multiformats/hashes/sha2') + +/** + * @typedef {import('interface-datastore').Datastore} Datastore + * @typedef {import('interface-blockstore').Blockstore} Blockstore + * @typedef {import('@ipld/dag-pb').PBNode} PBNode + * + * @typedef {object} Pin + * @property {CID} key + * @property {Uint8Array} [data] + */ + +/** + * @param {PBNode} rootNode + */ +function readHeader (rootNode) { + // rootNode.data should be a buffer of the format: + // < varint(headerLength) | header | itemData... 
> + const rootData = rootNode.Data + + if (!rootData) { + throw new Error('No data present') + } + + const hdrLength = varint.decode(rootData) + const vBytes = varint.decode.bytes + + if (vBytes <= 0) { + throw new Error('Invalid Set header length') + } + + if (vBytes + hdrLength > rootData.length) { + throw new Error('Impossibly large set header length') + } + + const hdrSlice = rootData.slice(vBytes, hdrLength + vBytes) + const header = PinSet.toObject(PinSet.decode(hdrSlice), { + defaults: false, + arrays: true, + longs: Number, + objects: false + }) + + if (header.version !== 1) { + throw new Error(`Unsupported Set version: ${header.version}`) + } + + if (header.fanout > rootNode.Links.length) { + throw new Error('Impossibly large fanout') + } + + return { + header: header, + data: rootData.slice(hdrLength + vBytes) + } +} + +/** + * @param {number} seed + * @param {CID} key + */ +function hash (seed, key) { + const buffer = new Uint8Array(4) + const dataView = new DataView(buffer.buffer) + dataView.setUint32(0, seed, true) + const encodedKey = uint8ArrayFromString(key.toString()) + const data = uint8ArrayConcat([buffer, encodedKey], buffer.byteLength + encodedKey.byteLength) + + return fnv1a(uint8ArrayToString(data)) +} + +/** + * @param {Blockstore} blockstore + * @param {PBNode} node + * @returns {AsyncGenerator} + */ +async function * walkItems (blockstore, node) { + const pbh = readHeader(node) + let idx = 0 + + for (const link of node.Links) { + if (idx < pbh.header.fanout) { + // the first pbh.header.fanout links are fanout bins + // if a fanout bin is not 'empty', dig into and walk its DAGLinks + const linkHash = link.Hash + + if (!EMPTY_KEY.equals(linkHash)) { + // walk the links of this fanout bin + const buf = await blockstore.get(linkHash) + const node = dagPb.decode(buf) + + yield * walkItems(blockstore, node) + } + } else { + // otherwise, the link is a pin + yield link.Hash + } + + idx++ + } +} + +/** + * @param {Blockstore} blockstore + * @param {PBNode} rootNode + * @param {string} name + */ +async function * loadSet (blockstore, rootNode, name) { + const link = rootNode.Links.find(l => l.Name === name) + + if (!link) { + throw new Error('No link found with name ' + name) + } + + const buf = await blockstore.get(link.Hash) + const node = dagPb.decode(buf) + + yield * walkItems(blockstore, node) +} + +/** + * @param {Blockstore} blockstore + * @param {Pin[]} items + */ +function storeItems (blockstore, items) { + return storePins(items, 0) + + /** + * @param {Pin[]} pins + * @param {number} depth + */ + async function storePins (pins, depth) { + const pbHeader = PinSet.encode({ + version: 1, + fanout: DEFAULT_FANOUT, + seed: depth + }).finish() + + const header = varint.encode(pbHeader.length) + const headerBuf = uint8ArrayConcat([header, pbHeader]) + const fanoutLinks = [] + + for (let i = 0; i < DEFAULT_FANOUT; i++) { + fanoutLinks.push({ + Name: '', + Tsize: 1, + Hash: EMPTY_KEY + }) + } + + if (pins.length <= MAX_ITEMS) { + const nodes = pins + .map(item => { + return ({ + link: { + Name: '', + Tsize: 1, + Hash: item.key + }, + data: item.data || new Uint8Array() + }) + }) + // sorting makes any ordering of `pins` produce the same DAGNode + .sort((a, b) => { + return uint8ArrayCompare(a.link.Hash.bytes, b.link.Hash.bytes) + }) + + const rootLinks = fanoutLinks.concat(nodes.map(item => item.link)) + const rootData = uint8ArrayConcat([headerBuf, ...nodes.map(item => item.data)]) + + return { + Data: rootData, + Links: rootLinks + } + } else { + // If the array of pins 
is > MAX_ITEMS, we: + // - distribute the pins among `DEFAULT_FANOUT` bins + // - create a DAGNode for each bin + // - add each pin as a DAGLink to that bin + // - create a root DAGNode + // - add each bin as a DAGLink + // - send that root DAGNode via callback + // (using go-ipfs' "wasteful but simple" approach for consistency) + // https://github.com/ipfs/go-ipfs/blob/master/pin/set.go#L57 + + /** @type {Pin[][]} */ + const bins = pins.reduce((bins, pin) => { + const n = hash(depth, pin.key) % DEFAULT_FANOUT + // @ts-ignore + bins[n] = n in bins ? bins[n].concat([pin]) : [pin] + return bins + }, []) + + let idx = 0 + for (const bin of bins) { + const child = await storePins(bin, depth + 1) + + await storeChild(child, idx) + + idx++ + } + + return { + Data: headerBuf, + Links: fanoutLinks + } + } + + /** + * @param {PBNode} child + * @param {number} binIdx + */ + async function storeChild (child, binIdx) { + const buf = dagPb.encode(child) + const digest = await sha256.digest(buf) + const cid = CID.createV0(digest) + + await blockstore.put(cid, buf) + + const size = child.Links.reduce((acc, curr) => acc + (curr.Tsize || 0), 0) + buf.length + + fanoutLinks[binIdx] = { + Name: '', + Tsize: size, + Hash: cid + } + } + } +} + +/** + * @param {Blockstore} blockstore + * @param {string} type + * @param {CID[]} cids + */ +async function storeSet (blockstore, type, cids) { + const rootNode = await storeItems(blockstore, cids.map(cid => { + return { + key: cid + } + })) + const buf = dagPb.encode(rootNode) + const digest = await sha256.digest(buf) + const cid = CID.createV0(digest) + + await blockstore.put(cid, buf) + + const size = rootNode.Links.reduce((acc, curr) => acc + curr.Tsize, 0) + buf.length + + return { + Name: type, + Tsize: size, + Hash: cid + } +} + +module.exports = { + loadSet, + storeSet +} diff --git a/packages/ipfs-repo-migrations/migrations/migration-9/pin.d.ts b/packages/ipfs-repo-migrations/migrations/migration-9/pin.d.ts new file mode 100644 index 00000000..935f21ec --- /dev/null +++ b/packages/ipfs-repo-migrations/migrations/migration-9/pin.d.ts @@ -0,0 +1,79 @@ +import * as $protobuf from "protobufjs"; +/** Namespace ipfs. */ +export namespace ipfs { + + /** Namespace pin. */ + namespace pin { + + /** Properties of a Set. */ + interface ISet { + + /** Set version */ + version?: (number|null); + + /** Set fanout */ + fanout?: (number|null); + + /** Set seed */ + seed?: (number|null); + } + + /** Represents a Set. */ + class Set implements ISet { + + /** + * Constructs a new Set. + * @param [p] Properties to set + */ + constructor(p?: ipfs.pin.ISet); + + /** Set version. */ + public version: number; + + /** Set fanout. */ + public fanout: number; + + /** Set seed. */ + public seed: number; + + /** + * Encodes the specified Set message. Does not implicitly {@link ipfs.pin.Set.verify|verify} messages. + * @param m Set message or plain object to encode + * @param [w] Writer to encode to + * @returns Writer + */ + public static encode(m: ipfs.pin.ISet, w?: $protobuf.Writer): $protobuf.Writer; + + /** + * Decodes a Set message from the specified reader or buffer. + * @param r Reader or buffer to decode from + * @param [l] Message length if known beforehand + * @returns Set + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + public static decode(r: ($protobuf.Reader|Uint8Array), l?: number): ipfs.pin.Set; + + /** + * Creates a Set message from a plain object. 
Also converts values to their respective internal types. + * @param d Plain object + * @returns Set + */ + public static fromObject(d: { [k: string]: any }): ipfs.pin.Set; + + /** + * Creates a plain object from a Set message. Also converts values to other types if specified. + * @param m Set + * @param [o] Conversion options + * @returns Plain object + */ + public static toObject(m: ipfs.pin.Set, o?: $protobuf.IConversionOptions): { [k: string]: any }; + + /** + * Converts this Set to JSON. + * @returns JSON object + */ + public toJSON(): { [k: string]: any }; + } + } +} diff --git a/packages/ipfs-repo-migrations/migrations/migration-9/pin.js b/packages/ipfs-repo-migrations/migrations/migration-9/pin.js new file mode 100644 index 00000000..21b509e9 --- /dev/null +++ b/packages/ipfs-repo-migrations/migrations/migration-9/pin.js @@ -0,0 +1,210 @@ +/*eslint-disable*/ +"use strict"; + +var $protobuf = require("protobufjs/minimal"); + +// Common aliases +var $Reader = $protobuf.Reader, $Writer = $protobuf.Writer, $util = $protobuf.util; + +// Exported root namespace +var $root = $protobuf.roots["default"] || ($protobuf.roots["default"] = {}); + +$root.ipfs = (function() { + + /** + * Namespace ipfs. + * @exports ipfs + * @namespace + */ + var ipfs = {}; + + ipfs.pin = (function() { + + /** + * Namespace pin. + * @memberof ipfs + * @namespace + */ + var pin = {}; + + pin.Set = (function() { + + /** + * Properties of a Set. + * @memberof ipfs.pin + * @interface ISet + * @property {number|null} [version] Set version + * @property {number|null} [fanout] Set fanout + * @property {number|null} [seed] Set seed + */ + + /** + * Constructs a new Set. + * @memberof ipfs.pin + * @classdesc Represents a Set. + * @implements ISet + * @constructor + * @param {ipfs.pin.ISet=} [p] Properties to set + */ + function Set(p) { + if (p) + for (var ks = Object.keys(p), i = 0; i < ks.length; ++i) + if (p[ks[i]] != null) + this[ks[i]] = p[ks[i]]; + } + + /** + * Set version. + * @member {number} version + * @memberof ipfs.pin.Set + * @instance + */ + Set.prototype.version = 0; + + /** + * Set fanout. + * @member {number} fanout + * @memberof ipfs.pin.Set + * @instance + */ + Set.prototype.fanout = 0; + + /** + * Set seed. + * @member {number} seed + * @memberof ipfs.pin.Set + * @instance + */ + Set.prototype.seed = 0; + + /** + * Encodes the specified Set message. Does not implicitly {@link ipfs.pin.Set.verify|verify} messages. + * @function encode + * @memberof ipfs.pin.Set + * @static + * @param {ipfs.pin.ISet} m Set message or plain object to encode + * @param {$protobuf.Writer} [w] Writer to encode to + * @returns {$protobuf.Writer} Writer + */ + Set.encode = function encode(m, w) { + if (!w) + w = $Writer.create(); + if (m.version != null && Object.hasOwnProperty.call(m, "version")) + w.uint32(8).uint32(m.version); + if (m.fanout != null && Object.hasOwnProperty.call(m, "fanout")) + w.uint32(16).uint32(m.fanout); + if (m.seed != null && Object.hasOwnProperty.call(m, "seed")) + w.uint32(29).fixed32(m.seed); + return w; + }; + + /** + * Decodes a Set message from the specified reader or buffer. 
+ * @function decode + * @memberof ipfs.pin.Set + * @static + * @param {$protobuf.Reader|Uint8Array} r Reader or buffer to decode from + * @param {number} [l] Message length if known beforehand + * @returns {ipfs.pin.Set} Set + * @throws {Error} If the payload is not a reader or valid buffer + * @throws {$protobuf.util.ProtocolError} If required fields are missing + */ + Set.decode = function decode(r, l) { + if (!(r instanceof $Reader)) + r = $Reader.create(r); + var c = l === undefined ? r.len : r.pos + l, m = new $root.ipfs.pin.Set(); + while (r.pos < c) { + var t = r.uint32(); + switch (t >>> 3) { + case 1: + m.version = r.uint32(); + break; + case 2: + m.fanout = r.uint32(); + break; + case 3: + m.seed = r.fixed32(); + break; + default: + r.skipType(t & 7); + break; + } + } + return m; + }; + + /** + * Creates a Set message from a plain object. Also converts values to their respective internal types. + * @function fromObject + * @memberof ipfs.pin.Set + * @static + * @param {Object.} d Plain object + * @returns {ipfs.pin.Set} Set + */ + Set.fromObject = function fromObject(d) { + if (d instanceof $root.ipfs.pin.Set) + return d; + var m = new $root.ipfs.pin.Set(); + if (d.version != null) { + m.version = d.version >>> 0; + } + if (d.fanout != null) { + m.fanout = d.fanout >>> 0; + } + if (d.seed != null) { + m.seed = d.seed >>> 0; + } + return m; + }; + + /** + * Creates a plain object from a Set message. Also converts values to other types if specified. + * @function toObject + * @memberof ipfs.pin.Set + * @static + * @param {ipfs.pin.Set} m Set + * @param {$protobuf.IConversionOptions} [o] Conversion options + * @returns {Object.} Plain object + */ + Set.toObject = function toObject(m, o) { + if (!o) + o = {}; + var d = {}; + if (o.defaults) { + d.version = 0; + d.fanout = 0; + d.seed = 0; + } + if (m.version != null && m.hasOwnProperty("version")) { + d.version = m.version; + } + if (m.fanout != null && m.hasOwnProperty("fanout")) { + d.fanout = m.fanout; + } + if (m.seed != null && m.hasOwnProperty("seed")) { + d.seed = m.seed; + } + return d; + }; + + /** + * Converts this Set to JSON. 
+ * @function toJSON + * @memberof ipfs.pin.Set + * @instance + * @returns {Object.} JSON object + */ + Set.prototype.toJSON = function toJSON() { + return this.constructor.toObject(this, $protobuf.util.toJSONOptions); + }; + + return Set; + })(); + + return pin; + })(); + + return ipfs; +})(); + +module.exports = $root; diff --git a/packages/ipfs-repo-migrations/migrations/migration-9/pin.proto b/packages/ipfs-repo-migrations/migrations/migration-9/pin.proto new file mode 100644 index 00000000..4e1f33f7 --- /dev/null +++ b/packages/ipfs-repo-migrations/migrations/migration-9/pin.proto @@ -0,0 +1,11 @@ +syntax = "proto2"; + +package ipfs.pin; + +option go_package = "pb"; + +message Set { + optional uint32 version = 1; + optional uint32 fanout = 2; + optional fixed32 seed = 3; +} diff --git a/packages/ipfs-repo-migrations/migrations/migration-9/utils.js b/packages/ipfs-repo-migrations/migrations/migration-9/utils.js new file mode 100644 index 00000000..a479ccc0 --- /dev/null +++ b/packages/ipfs-repo-migrations/migrations/migration-9/utils.js @@ -0,0 +1,31 @@ +'use strict' + +const { Key } = require('interface-datastore') +const { base32 } = require('multiformats/bases/base32') +const { CID } = require('multiformats') + +const PIN_DS_KEY = new Key('/local/pins') +const DEFAULT_FANOUT = 256 +const MAX_ITEMS = 8192 +const EMPTY_KEY = CID.parse('QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n') + +const PinTypes = { + direct: 'direct', + recursive: 'recursive' +} + +/** + * @param {import('multiformats').CID} cid + */ +function cidToKey (cid) { + return new Key(`/${base32.encode(cid.multihash.bytes).toUpperCase().substring(1)}`) +} + +module.exports = { + PIN_DS_KEY, + DEFAULT_FANOUT, + MAX_ITEMS, + EMPTY_KEY, + PinTypes, + cidToKey +} diff --git a/packages/ipfs-repo-migrations/package.json b/packages/ipfs-repo-migrations/package.json new file mode 100644 index 00000000..f0dff7b2 --- /dev/null +++ b/packages/ipfs-repo-migrations/package.json @@ -0,0 +1,81 @@ +{ + "name": "ipfs-repo-migrations", + "version": "10.0.0", + "description": "Migration framework for versioning of JS IPFS Repo", + "keywords": [ + "IPFS", + "libp2p", + "migrations" + ], + "homepage": "https://github.com/ipfs/js-ipfs-repo/tree/master/packages/ipfs-repo-migrations", + "bugs": { + "url": "https://github.com/ipfs/js-ipfs-repo/issues/" + }, + "license": "(Apache-2.0 OR MIT)", + "files": [ + "migrations", + "src", + "dist" + ], + "types": "./dist/src/index.d.ts", + "main": "src/index.js", + "browser": { + "datastore-fs": "datastore-level" + }, + "repository": { + "type": "git", + "url": "https://github.com/ipfs/js-ipfs-repo.git" + }, + "scripts": { + "clean": "rimraf types dist", + "build": "run-s build:*", + "build:proto": "pbjs -t static-module -w commonjs --force-number --no-verify --no-delimited --no-create --no-beautify --no-defaults --lint eslint-disable -o migrations/migration-9/pin.js migrations/migration-9/pin.proto", + "build:proto-types": "pbts -o migrations/migration-9/pin.d.ts migrations/migration-9/pin.js", + "build:source": "aegir build", + "test": "aegir test", + "lint": "aegir ts -p check && aegir lint", + "release": "aegir release", + "release-minor": "aegir release --type minor", + "release-major": "aegir release --type major", + "depcheck": "aegir dep-check" + }, + "dependencies": { + "@ipld/dag-pb": "^2.0.0", + "cborg": "^1.3.1", + "datastore-core": "^6.0.7", + "debug": "^4.1.0", + "fnv1a": "^1.0.1", + "interface-blockstore": "^2.0.2", + "interface-datastore": 
"^6.0.2", + "it-length": "^1.0.1", + "multiformats": "^9.0.0", + "proper-lockfile": "^4.1.1", + "protobufjs": "^6.10.2", + "uint8arrays": "^3.0.0", + "varint": "^6.0.0" + }, + "devDependencies": { + "@ipld/car": "^3.0.0", + "@types/debug": "^4.1.5", + "@types/rimraf": "^3.0.2", + "@types/varint": "^6.0.0", + "aegir": "^35.0.1", + "assert": "^2.0.0", + "aws-sdk": "^2.884.0", + "blockstore-core": "^1.0.2", + "blockstore-datastore-adapter": "2.0.1", + "datastore-fs": "^6.0.1", + "datastore-level": "^7.0.1", + "datastore-s3": "^8.0.0", + "events": "^3.2.0", + "it-all": "^1.0.2", + "just-safe-set": "^2.1.0", + "level-5": "npm:level@^5.0.0", + "level-6": "npm:level@^6.0.0", + "ncp": "^2.0.0", + "npm-run-all": "^4.1.5", + "rimraf": "^3.0.0", + "sinon": "^11.1.1", + "util": "^0.12.3" + } +} diff --git a/packages/ipfs-repo-migrations/scripts/node-globals.js b/packages/ipfs-repo-migrations/scripts/node-globals.js new file mode 100644 index 00000000..8c4e233b --- /dev/null +++ b/packages/ipfs-repo-migrations/scripts/node-globals.js @@ -0,0 +1,3 @@ +// file: node-globals.js +// @ts-nocheck +export const { Buffer } = require('buffer') diff --git a/packages/ipfs-repo-migrations/src/errors.js b/packages/ipfs-repo-migrations/src/errors.js new file mode 100644 index 00000000..1bf39295 --- /dev/null +++ b/packages/ipfs-repo-migrations/src/errors.js @@ -0,0 +1,90 @@ +'use strict' + +/** + * Exception raised when trying to revert migration that is not possible + * to revert. + */ +class NonReversibleMigrationError extends Error { + /** + * @param {string} message + */ + constructor (message) { + super(message) + this.name = 'NonReversibleMigrationError' + this.code = 'ERR_NON_REVERSIBLE_MIGRATION' + this.message = message + } +} +NonReversibleMigrationError.code = 'ERR_NON_REVERSIBLE_MIGRATION' + +/** + * Exception raised when repo is not initialized. + */ +class NotInitializedRepoError extends Error { + /** + * @param {string} message + */ + constructor (message) { + super(message) + this.name = 'NotInitializedRepoError' + this.code = 'ERR_NOT_INITIALIZED_REPO' + this.message = message + } +} +NotInitializedRepoError.code = 'ERR_NOT_INITIALIZED_REPO' + +/** + * Exception raised when required parameter is not provided. + */ +class RequiredParameterError extends Error { + /** + * @param {string} message + */ + constructor (message) { + super(message) + this.name = 'RequiredParameterError' + this.code = 'ERR_REQUIRED_PARAMETER' + this.message = message + } +} +RequiredParameterError.code = 'ERR_REQUIRED_PARAMETER' + +/** + * Exception raised when value is not valid. + */ +class InvalidValueError extends Error { + /** + * @param {string} message + */ + constructor (message) { + super(message) + this.name = 'InvalidValueError' + this.code = 'ERR_INVALID_VALUE' + this.message = message + } +} +InvalidValueError.code = 'ERR_INVALID_VALUE' + +/** + * Exception raised when config is not passed. 
+ */
+class MissingRepoOptionsError extends Error {
+  /**
+   * @param {string} message
+   */
+  constructor (message) {
+    super(message)
+    this.name = 'MissingRepoOptionsError'
+    this.code = 'ERR_MISSING_REPO_OPTIONS'
+    this.message = message
+  }
+}
+MissingRepoOptionsError.code = 'ERR_MISSING_REPO_OPTIONS'
+
+module.exports = {
+  NonReversibleMigrationError,
+  NotInitializedRepoError,
+  RequiredParameterError,
+  InvalidValueError,
+  MissingRepoOptionsError
+}
diff --git a/packages/ipfs-repo-migrations/src/index.js b/packages/ipfs-repo-migrations/src/index.js
new file mode 100644
index 00000000..21d065a9
--- /dev/null
+++ b/packages/ipfs-repo-migrations/src/index.js
@@ -0,0 +1,271 @@
+/* eslint complexity: ["error", 27] */
+'use strict'
+
+const defaultMigrations = require('../migrations')
+const repoVersion = require('./repo/version')
+const errors = require('./errors')
+const { wrapBackends } = require('./utils')
+const log = require('debug')('ipfs:repo:migrator')
+
+/**
+ * @typedef {import('./types').Migration} Migration
+ * @typedef {import('./types').MigrationOptions} MigrationOptions
+ * @typedef {import('./types').ProgressCallback} ProgressCallback
+ * @typedef {import('./types').MigrationProgressCallback} MigrationProgressCallback
+ */
+
+/**
+ * Returns the version of the latest migration.
+ * If no migrations are present, returns 0.
+ *
+ * @param {Migration[]} [migrations] - Array of migrations to consider. If undefined, the bundled migrations are used. Mainly for testing purposes.
+ */
+function getLatestMigrationVersion (migrations) {
+  migrations = migrations || defaultMigrations
+
+  if (!Array.isArray(migrations) || migrations.length === 0) {
+    return 0
+  }
+
+  return migrations[migrations.length - 1].version
+}
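+
+// A typical invocation from repo code might look like this (sketch only - `path`,
+// `backends` and `repoOptions` are assumed to be set up as described in ./types):
+//
+//   const migrator = require('ipfs-repo-migrations')
+//   await migrator.migrate(path, backends, repoOptions, migrator.getLatestMigrationVersion(), {
+//     onProgress: (version, percent, message) => console.info(version, percent, message)
+//   })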
+
+/**
+ * Main function to execute forward migrations.
+ * It acquires a lock on the provided path before doing any migrations.
+ *
+ * The signature of the progress callback is: function(version: number, percentComplete: string, message: string)
+ *
+ * @param {string} path - Path to an initialized (!) JS-IPFS repo
+ * @param {import('./types').Backends} backends
+ * @param {import('./types').RepoOptions} repoOptions - Options that are passed to migrations, which can use them to correctly construct the datastore. The options are the same as for IPFSRepo.
+ * @param {number} toVersion - Version to which the repo should be migrated.
+ * @param {MigrationOptions} [options] - Options for the migration
+ */
+async function migrate (path, backends, repoOptions, toVersion, { ignoreLock = false, onProgress, isDryRun = false, migrations }) {
+  migrations = migrations || defaultMigrations
+
+  if (!path) {
+    throw new errors.RequiredParameterError('Path argument is required!')
+  }
+
+  if (!repoOptions) {
+    throw new errors.RequiredParameterError('repoOptions argument is required!')
+  }
+
+  if (!toVersion) {
+    throw new errors.RequiredParameterError('toVersion argument is required!')
+  }
+
+  if (!Number.isInteger(toVersion) || toVersion <= 0) {
+    throw new errors.InvalidValueError('Version has to be a positive integer!')
+  }
+
+  // make sure we can read pre-level@5 datastores
+  backends = wrapBackends(backends)
+
+  const currentVersion = await repoVersion.getVersion(backends)
+
+  if (currentVersion === toVersion) {
+    log('Nothing to migrate.')
+    return
+  }
+
+  if (currentVersion > toVersion) {
+    throw new errors.InvalidValueError(`Current repo's version (${currentVersion}) is higher than toVersion (${toVersion}), you probably wanted to revert it?`)
+  }
+
+  verifyAvailableMigrations(migrations, currentVersion, toVersion)
+
+  let lock
+
+  if (!isDryRun && !ignoreLock) {
+    lock = await repoOptions.repoLock.lock(path)
+  }
+
+  try {
+    for (const migration of migrations) {
+      if (toVersion !== undefined && migration.version > toVersion) {
+        break
+      }
+
+      if (migration.version <= currentVersion) {
+        continue
+      }
+
+      log(`Migrating version ${migration.version}`)
+
+      try {
+        if (!isDryRun) {
+          /** @type {MigrationProgressCallback} */
+          let progressCallback = () => {}
+
+          if (onProgress) { // eslint-disable-line max-depth
+            progressCallback = (percent, message) => onProgress(migration.version, percent.toFixed(2), message)
+          }
+
+          await migration.migrate(backends, progressCallback)
+        }
+      } catch (e) {
+        const lastSuccessfullyMigratedVersion = migration.version - 1
+
+        log(`An exception was raised during execution of migration. Setting the repo's version to the last successfully migrated version: ${lastSuccessfullyMigratedVersion}`)
+        await repoVersion.setVersion(lastSuccessfullyMigratedVersion, backends)
+
+        throw new Error(`During migration to version ${migration.version} an exception was raised: ${e.stack || e.message || e}`)
+      }
+
+      log(`Migrating to version ${migration.version} finished`)
+    }
+
+    if (!isDryRun) {
+      await repoVersion.setVersion(toVersion || getLatestMigrationVersion(migrations), backends)
+    }
+
+    log('Repo successfully migrated', toVersion !== undefined ? `to version ${toVersion}!` : 'to latest version!')
+  } finally {
+    if (!isDryRun && !ignoreLock && lock) {
+      await lock.close()
+    }
+  }
+}
+
+/**
+ * Main function to execute backward migration (reversion).
+ * It acquires a lock on the provided path before doing any migrations.
+ *
+ * The signature of the progress callback is: function(version: number, percentComplete: string, message: string)
+ *
+ * @param {string} path - Path to an initialized (!) JS-IPFS repo
+ * @param {import('./types').Backends} backends
+ * @param {import('./types').RepoOptions} repoOptions - Options that are passed to migrations, which can use them to correctly construct the datastore. The options are the same as for IPFSRepo.
+ * @param {number} toVersion - Version to which the repo will be reverted.
+ * @param {MigrationOptions} [options] - Options for the reversion
+ */
+async function revert (path, backends, repoOptions, toVersion, { ignoreLock = false, onProgress, isDryRun = false, migrations }) {
+  migrations = migrations || defaultMigrations
+
+  if (!path) {
+    throw new errors.RequiredParameterError('Path argument is required!')
+  }
+
+  if (!repoOptions) {
+    throw new errors.RequiredParameterError('repoOptions argument is required!')
+  }
+
+  if (!toVersion) {
+    throw new errors.RequiredParameterError('When reverting migrations, you have to specify to which version to revert!')
+  }
+
+  if (!Number.isInteger(toVersion) || toVersion <= 0) {
+    throw new errors.InvalidValueError('Version has to be a positive integer!')
+  }
+
+  // make sure we can read pre-level@5 datastores
+  backends = wrapBackends(backends)
+
+  const currentVersion = await repoVersion.getVersion(backends)
+
+  if (currentVersion === toVersion) {
+    log('Nothing to revert.')
+    return
+  }
+
+  if (currentVersion < toVersion) {
+    throw new errors.InvalidValueError(`Current repo's version (${currentVersion}) is lower than toVersion (${toVersion}), you probably wanted to migrate it?`)
+  }
+
+  verifyAvailableMigrations(migrations, toVersion, currentVersion, true)
+
+  let lock
+  if (!isDryRun && !ignoreLock) {
+    lock = await repoOptions.repoLock.lock(path)
+  }
+
+  log(`Reverting from version ${currentVersion} to ${toVersion}`)
+
+  try {
+    const reversedMigrationArray = migrations.slice().reverse()
+
+    for (const migration of reversedMigrationArray) {
+      if (migration.version <= toVersion) {
+        break
+      }
+
+      if (migration.version > currentVersion) {
+        continue
+      }
+
+      log(`Reverting migration version ${migration.version}`)
+
+      try {
+        if (!isDryRun) {
+          /** @type {MigrationProgressCallback} */
+          let progressCallback = () => {}
+
+          if (onProgress) { // eslint-disable-line max-depth
+            progressCallback = (percent, message) => onProgress(migration.version, percent.toFixed(2), message)
+          }
+
+          await migration.revert(backends, progressCallback)
+        }
+      } catch (e) {
+        const lastSuccessfullyRevertedVersion = migration.version
+        log(`An exception was raised during execution of migration. Setting the repo's version to the last successfully reverted version: ${lastSuccessfullyRevertedVersion}`)
+        await repoVersion.setVersion(lastSuccessfullyRevertedVersion, backends)
+
+        e.message = `During reversion to version ${migration.version} an exception was raised: ${e.message}`
+        throw e
+      }
+
+      log(`Reverting to version ${migration.version} finished`)
+    }
+
+    if (!isDryRun) {
+      await repoVersion.setVersion(toVersion, backends)
+    }
+
+    log(`All migrations successfully reverted to version ${toVersion}!`)
+  } finally {
+    if (!isDryRun && !ignoreLock && lock) {
+      await lock.close()
+    }
+  }
+}
+
+/**
+ * Checks that all migrations in the given range are available.
+ *
+ * @param {Migration[]} migrations
+ * @param {number} fromVersion
+ * @param {number} toVersion
+ * @param {boolean} checkReversibility - Additionally checks that all migrations in the range are reversible
+ */
+function verifyAvailableMigrations (migrations, fromVersion, toVersion, checkReversibility = false) {
+  let migrationCounter = 0
+  for (const migration of migrations) {
+    if (migration.version > toVersion) {
+      break
+    }
+
+    if (migration.version > fromVersion) {
+      if (checkReversibility && !migration.revert) {
+        throw new errors.NonReversibleMigrationError(`It is not possible to revert to version ${fromVersion} because migration version ${migration.version} is not reversible. Cancelling reversion.`)
+      }
+
+      migrationCounter++
+    }
+  }
+
+  if (migrationCounter !== (toVersion - fromVersion)) {
+    throw new errors.InvalidValueError(`The ipfs-repo-migrations package does not have all the migrations needed to migrate from version ${fromVersion} to ${toVersion}`)
+  }
+}
+
+module.exports = {
+  getCurrentRepoVersion: repoVersion.getVersion,
+  getLatestMigrationVersion,
+  errors,
+  migrate,
+  revert
+}
diff --git a/packages/ipfs-repo-migrations/src/repo/init.js b/packages/ipfs-repo-migrations/src/repo/init.js
new file mode 100644
index 00000000..30374ee6
--- /dev/null
+++ b/packages/ipfs-repo-migrations/src/repo/init.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const log = require('debug')('ipfs:repo:migrator:repo:init')
+const { CONFIG_KEY, VERSION_KEY } = require('../utils')
+const { MissingRepoOptionsError } = require('../errors')
+
+/**
+ * @param {import('../types').Backends} backends
+ */
+async function isRepoInitialized (backends) {
+  if (!backends) {
+    throw new MissingRepoOptionsError('Please pass repo options when trying to open a repo')
+  }
+
+  const root = backends.root
+
+  try {
+    await root.open()
+    const versionCheck = await root.has(VERSION_KEY)
+    const configCheck = await root.has(CONFIG_KEY)
+    if (!versionCheck || !configCheck) {
+      log(`Version entry present: ${versionCheck}`)
+      log(`Config entry present: ${configCheck}`)
+      return false
+    }
+
+    return true
+  } catch (e) {
+    log('An error was thrown while checking if the repo is initialized: ' + e.message)
+    return false
+  } finally {
+    if (root !== undefined) {
+      try {
+        await root.close()
+      } catch {}
+    }
+  }
+}
+
+module.exports = {
+  isRepoInitialized
+}
diff --git a/packages/ipfs-repo-migrations/src/repo/version.js b/packages/ipfs-repo-migrations/src/repo/version.js
new file mode 100644
index 00000000..9ee56c50
--- /dev/null
+++ b/packages/ipfs-repo-migrations/src/repo/version.js
@@ -0,0 +1,51 @@
+'use strict'
+
+const repoInit = require('./init')
+const { MissingRepoOptionsError, NotInitializedRepoError } = require('../errors')
+const { VERSION_KEY } = require('../utils')
+const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
+const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
+
+/**
+ * Retrieves the repo's version from the root datastore instance.
+ * This function needs to work across repo versions so that it can fetch any version number,
+ * even if the repo's versioning scheme changes.
+ *
+ * @param {import('../types').Backends} backends
+ */
+async function getVersion (backends) {
+  if (!(await repoInit.isRepoInitialized(backends))) {
+    throw new NotInitializedRepoError('Repo is not initialized!')
+  }
+
+  const store = backends.root
+  await store.open()
+
+  try {
+    return parseInt(uint8ArrayToString(await store.get(VERSION_KEY)))
+  } finally {
+    await store.close()
+  }
+}
+
+/**
+ * Sets the version in a cross-repo-version manner.
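+ * The version is written as a UTF-8 string under the root datastore's `/version` key,
+ * so any repo version can read it back with a plain string parse.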
+ * + * @param {number} version + * @param {import('../types').Backends} backends + */ +async function setVersion (version, backends) { + if (!backends) { + throw new MissingRepoOptionsError('Please pass repo options when trying to open a repo') + } + + const store = backends.root + await store.open() + await store.put(VERSION_KEY, uint8ArrayFromString(String(version))) + await store.close() +} + +module.exports = { + getVersion, + setVersion +} diff --git a/packages/ipfs-repo-migrations/src/types.d.ts b/packages/ipfs-repo-migrations/src/types.d.ts new file mode 100644 index 00000000..668b2878 --- /dev/null +++ b/packages/ipfs-repo-migrations/src/types.d.ts @@ -0,0 +1,84 @@ +import type { Datastore } from 'interface-datastore' +import type { Blockstore } from 'interface-blockstore' + +export interface ProgressCallback { + (version: number, progress: string, message: string): void +} + +export interface MigrationProgressCallback { + (percent: number, message: string): void +} + +export interface Migration { + version: number + description: string + migrate: (backends: Backends, onProgress: MigrationProgressCallback) => Promise + revert: (backends: Backends, onProgress: MigrationProgressCallback) => Promise +} + +export interface MigrationOptions { + /** + * Won't lock the repo for applying the migrations. Use with caution. + */ + ignoreLock?: boolean + /** + * Allows to simulate the execution of the migrations without any effect + */ + isDryRun?: boolean + /** + * Callback which will be called after each executed migration to report progress + */ + onProgress?: ProgressCallback + /** + * Array of migrations to perform. If undefined, the bundled migrations are used. Mainly for testing purposes. + */ + migrations?: Migration[] +} + +export interface Backends { + root: Datastore + blocks: Blockstore + keys: Datastore + datastore: Datastore + pins: Datastore +} + +export interface LockCloser { + close: () => Promise +} + +export interface RepoLock { + /** + * Sets the lock if one does not already exist. If a lock already exists, should throw an error. + */ + lock: (path: string) => Promise + + /** + * Checks the existence of the lock. + */ + locked: (path: string) => Promise +} + +export interface RepoOptions { + /** + * Controls automatic migrations of repository. (defaults: true) + */ + autoMigrate: boolean + /** + * Callback function to be notified of migration progress + */ + onMigrationProgress: (version: number, percentComplete: string, message: string) => void + + /** + * If multiple processes are accessing the same repo - e.g. via node cluster or browser UI and webworkers + * one instance must be designated the repo owner to hold the lock on shared resources like the datastore. + * + * Set this property to true on one instance only if this is how your application is set up. + */ + repoOwner: boolean + + /** + * A lock implementation that prevents multiple processes accessing the same repo + */ + repoLock: RepoLock +} diff --git a/packages/ipfs-repo-migrations/src/utils.js b/packages/ipfs-repo-migrations/src/utils.js new file mode 100644 index 00000000..c1f7b490 --- /dev/null +++ b/packages/ipfs-repo-migrations/src/utils.js @@ -0,0 +1,148 @@ +'use strict' + +const { Key } = require('interface-datastore/key') +const { Errors } = require('datastore-core') + +/** + * @typedef {import('interface-datastore').Datastore} Datastore + */ + +const CONFIG_KEY = new Key('/config') +const VERSION_KEY = new Key('/version') + +/** + * Level dbs wrap level dbs that wrap level dbs. 
Find a level-js + * instance in the chain if one exists. + * + * @param {Datastore} store + * @returns {Datastore | undefined} + */ +function findLevelJs (store) { + let db = store + + // @ts-ignore + while (db.db || db.child) { + // @ts-ignore + db = db.db || db.child + + // `Level` is only present in the browser, in node it is LevelDOWN + // @ts-ignore + if (db.type === 'level-js' || db.constructor.name === 'Level') { + return db + } + } +} + +/** + * @param {Key} key + * @param {function (Key): Promise} has + * @param {Datastore} store + * @returns {Promise} + */ +async function hasWithFallback (key, has, store) { + const result = await has(key) + + if (result) { + return result + } + + // Newer versions of level.js changed the key type from Uint8Array|string + // to Uint8Array so fall back to trying Uint8Arrays if we are using level.js + // and the string version of the key did not work + const levelJs = findLevelJs(store) + + if (!levelJs) { + return false + } + + return new Promise((resolve, reject) => { + // drop down to IndexDB API, otherwise level-js will monkey around with the keys/values + // @ts-ignore + const req = levelJs.store('readonly').get(key.toString()) + req.transaction.onabort = () => { + reject(req.transaction.error) + } + req.transaction.oncomplete = () => { + resolve(Boolean(req.result)) + } + }) +} + +/** + * @param {import('interface-datastore').Key} key + * @param {function (Key): Promise} get + * @param {function (Key): Promise} has + * @param {import('interface-datastore').Datastore} store + * @returns {Promise} + */ +async function getWithFallback (key, get, has, store) { + if (await has(key)) { + return get(key) + } + + // Newer versions of level.js changed the key type from Uint8Array|string + // to Uint8Array so fall back to trying Uint8Arrays if we are using level.js + // and the string version of the key did not work + const levelJs = findLevelJs(store) + + if (!levelJs) { + throw Errors.notFoundError() + } + + return new Promise((resolve, reject) => { + // drop down to IndexDB API, otherwise level-js will monkey around with the keys/values + // @ts-ignore + const req = levelJs.store('readonly').get(key.toString()) + req.transaction.onabort = () => { + reject(req.transaction.error) + } + req.transaction.oncomplete = () => { + if (req.result) { + return resolve(req.result) + } + + reject(Errors.notFoundError()) + } + }) +} + +/** + * @param {Datastore} store + */ +function wrapStore (store) { + // necessary since level-js@5 cannot read keys from level-js@4 and earlier + const originalGet = store.get.bind(store) + const originalHas = store.has.bind(store) + /** + * @param {Key} key + */ + store.get = (key) => getWithFallback(key, originalGet, originalHas, store) + /** + * @param {Key} key + */ + store.has = (key) => hasWithFallback(key, originalHas, store) + + return store +} + +/** + * @param {import('./types').Backends} backends + */ +function wrapBackends (backends) { + return { + ...backends, + root: wrapStore(backends.root), + datastore: wrapStore(backends.datastore), + pins: wrapStore(backends.pins), + keys: wrapStore(backends.keys) + } +} + +module.exports = { + wrapBackends, + hasWithFallback, + getWithFallback, + findLevelJs, + CONFIG_KEY, + VERSION_KEY +} diff --git a/packages/ipfs-repo-migrations/test/browser.js b/packages/ipfs-repo-migrations/test/browser.js new file mode 100644 index 00000000..49d01b18 --- /dev/null +++ b/packages/ipfs-repo-migrations/test/browser.js @@ -0,0 +1,150 @@ +/* eslint-env mocha */ +'use strict' + +const { 
LevelDatastore } = require('datastore-level') +const { S3Datastore } = require('datastore-s3') +const { ShardingDatastore } = require('datastore-core/sharding') +const { NextToLast } = require('datastore-core/shard') +const { BlockstoreDatastoreAdapter } = require('blockstore-datastore-adapter') +const mockS3 = require('./fixtures/mock-s3') +const S3 = require('aws-sdk').S3 +const { createRepo } = require('./fixtures/repo') + +/** + * @typedef {import('../src/types').Backends} Backends + */ + +/** + * @param {*} dir + * @returns {Promise} + */ +async function deleteDb (dir) { + return new Promise((resolve) => { + const req = globalThis.indexedDB.deleteDatabase(dir) + req.onerror = () => { + console.error(`Could not delete ${dir}`) // eslint-disable-line no-console + resolve() + } + req.onsuccess = () => { + resolve() + } + }) +} + +/** + * @type {import('./types').CleanupFunction} + */ +async function cleanup (dir) { + await deleteDb(dir) + await deleteDb('level-js-' + dir) + + for (const type of ['blocks', 'keys', 'datastore', 'pins']) { + await deleteDb(dir + '/' + type) + await deleteDb('level-js-' + dir + '/' + type) + } +} + +const CONFIGURATIONS = [{ + name: 'local', + cleanup, + /** + * @param {string} prefix + * @returns {import('../src/types').Backends} + */ + createBackends: (prefix) => { + return { + root: new LevelDatastore(prefix, { + version: 2 + }), + blocks: new BlockstoreDatastoreAdapter( + new LevelDatastore(`${prefix}/blocks`, { + version: 2 + }) + ), + datastore: new LevelDatastore(`${prefix}/datastore`, { + version: 2 + }), + keys: new LevelDatastore(`${prefix}/keys`, { + version: 2 + }), + pins: new LevelDatastore(`${prefix}/pins`, { + version: 2 + }) + } + } +}, { + name: 'with s3', + cleanup: async () => {}, + /** + * @param {string} prefix + * @returns {import('../src/types').Backends} + */ + createBackends: (prefix) => { + const s3Instance = new S3({ + params: { + Bucket: 'test' + } + }) + mockS3(s3Instance) + + return { + root: new S3Datastore(prefix, { + s3: s3Instance, + createIfMissing: false + }), + blocks: new BlockstoreDatastoreAdapter( + new ShardingDatastore( + new S3Datastore(`${prefix}/blocks`, { + s3: s3Instance, + createIfMissing: false + }), + new NextToLast(2) + ) + ), + datastore: new ShardingDatastore( + new S3Datastore(`${prefix}/datastore`, { + s3: s3Instance, + createIfMissing: false + }), + new NextToLast(2) + ), + keys: new ShardingDatastore( + new S3Datastore(`${prefix}/keys`, { + s3: s3Instance, + createIfMissing: false + }), + new NextToLast(2) + ), + pins: new ShardingDatastore( + new S3Datastore(`${prefix}/pins`, { + s3: s3Instance, + createIfMissing: false + }), + new NextToLast(2) + ) + } + } +}] + +CONFIGURATIONS.forEach(({ name, createBackends, cleanup }) => { + /** @type {import('./types').SetupFunction} */ + const setup = (prefix) => createRepo(createBackends, prefix) + + describe(name, () => { + describe('version tests', () => { + require('./version-test')(setup, cleanup) + }) + + describe('migrations tests', () => { + require('./migrations')(setup, cleanup) + }) + + describe('init tests', () => { + require('./init-test')(setup, cleanup) + }) + + describe('integration tests', () => { + require('./integration-test')(setup, cleanup) + }) + }) +}) diff --git a/packages/ipfs-repo-migrations/test/fixtures/generate-car-files.js b/packages/ipfs-repo-migrations/test/fixtures/generate-car-files.js new file mode 100644 index 00000000..f6ce4842 --- /dev/null +++ b/packages/ipfs-repo-migrations/test/fixtures/generate-car-files.js @@ -0,0 
+1,154 @@ +'use strict' + +/* eslint-disable no-console */ + +// nb. must be ipfs@0.48.0 or below +// @ts-expect-error not in package.json +const IPFS = require('ipfs') +const { + CID +} = IPFS +const { Key } = require('interface-datastore') +const PIN_DS_KEY = new Key('/local/pins') +const fs = require('fs') +const { CarWriter } = require('@ipld/car') +const path = require('path') +const { Readable } = require('stream') + +const TO_PIN = 9000 + +const main = async () => { + const ipfs = await IPFS.create({ + profile: 'lowpower' + }) + + const directPins = [] + + for (let i = TO_PIN; i < TO_PIN + 10; i++) { + const data = `hello-${i}` + const { cid } = await ipfs.add(data, { pin: false }) + + await ipfs.pin.add(cid, { + recursive: false + }) + + directPins.push(cid) + } + + console.info('const directPins = [') + console.info(' ', directPins.map(cid => `'${cid}'`).join(',\n ')) + console.info(']') + + const nonDagPbRecursivePins = [] + + for (let i = TO_PIN + 10; i < TO_PIN + 20; i++) { + const data = { derp: `hello-${i}` } + const cid = await ipfs.dag.put(data) + + await ipfs.pin.add(cid, { + recursive: true + }) + + nonDagPbRecursivePins.push(`${cid}`) + } + + console.info('const nonDagPbRecursivePins = [') + console.info(' ', nonDagPbRecursivePins.join(',\n ')) + console.info(']') + + const nonDagPbDirectPins = [] + + for (let i = TO_PIN + 20; i < TO_PIN + 30; i++) { + const data = { derp: `hello-${i}` } + const cid = await ipfs.dag.put(data) + + await ipfs.pin.add(cid, { + recursive: false + }) + + nonDagPbDirectPins.push(`${cid}`) + } + + console.info('const nonDagPbDirectPins = [') + console.info(' ', nonDagPbDirectPins.join(',\n ')) + console.info(']') + + console.info('const pinsets = {') + + await writeCar('basic pinset', true) + + for (let i = 0; i < TO_PIN; i++) { + const data = `hello-${i}` + await ipfs.add(data) + } + + await writeCar('multiple buckets pinset') + + console.info('}') + + await ipfs.stop() + + /** + * @param {string} pinsetName + * @param {boolean} [more] + */ + async function writeCar (pinsetName, more) { + const fileName = `pinset-${pinsetName.replace(/\s/g, '-').replace('-pinset', '')}.car` + + console.info(` '${pinsetName}': {`) + console.info(` car: loadFixture('test/fixtures/${fileName}'),`) + + const buf = await ipfs.libp2p.datastore.get(PIN_DS_KEY) + const cid = CID.decode(buf) + + console.info(` root: CID.parse('${cid}'),`) + + const { writer, out } = await CarWriter.create([cid]) + Readable.from(out).pipe(fs.createWriteStream(path.join(__dirname, fileName))) + + await walk(cid, writer) + + let pins = 0 + + for await (const _ of ipfs.pin.ls()) { // eslint-disable-line no-unused-vars + pins++ + } + + console.info(` pins: ${pins}`) + console.info(` }${more ? 
',' : ''}`) + + await writer.close() + } + + /** + * @param {CID} cid + * @param {import('@ipld/car/api').BlockWriter} car + * @param {Record} cids + * @returns + */ + async function walk (cid, car, cids = {}) { + if (cids[cid.toString()]) { + return + } + + cids[cid.toString()] = true + + const block = await ipfs.block.get(cid) + + car.put({ cid, bytes: block.data }) + + const { value: node } = await ipfs.dag.get(cid) + + if (node.Links) { + for (const link of node.Links) { + await walk(link.Hash, car, cids) + } + } + } +} + +main() + .catch(err => { + console.error(err) + process.exit(1) + }) diff --git a/packages/ipfs-repo-migrations/test/fixtures/mock-s3.js b/packages/ipfs-repo-migrations/test/fixtures/mock-s3.js new file mode 100644 index 00000000..1ea8d6b8 --- /dev/null +++ b/packages/ipfs-repo-migrations/test/fixtures/mock-s3.js @@ -0,0 +1,159 @@ +'use strict' + +const { expect } = require('aegir/utils/chai') +const sinon = require('sinon') +const { Buffer } = require('buffer') +const AWS = require('aws-sdk') + +class S3Error extends Error { + /** + * @param {string} message + * @param {number} [code] + */ + constructor (message, code) { + super(message) + this.code = message + this.statusCode = code + } +} + +/** + * @template T + * @param {T} [res] + */ +const s3Resolve = (res) => { + const request = new AWS.Request(new AWS.Service(), 'op') + + sinon.replace(request, 'promise', () => { + return Promise.resolve(res) + }) + + return request +} + +/** + * @template T + * @param {T} err + */ +const s3Reject = (err) => { + const request = new AWS.Request(new AWS.Service(), 'op') + + sinon.replace(request, 'promise', () => { + return Promise.reject(err) + }) + + return request +} + +/** + * Mocks out the s3 calls made by datastore-s3 + * + * @param {import('aws-sdk/clients/s3')} s3 + * @returns {void} + */ +module.exports = function (s3) { + /** @type {Record} */ + const storage = {} + + sinon.replace(s3, 'deleteObject', (params) => { + expect(params).to.have.property('Key').that.is.a('string') + + if (!params) { + throw new Error('No params passed to s3.deleteObject') + } + + if (typeof params === 'function') { + throw new Error('params passed to s3.deleteObject was a function') + } + + if (storage[params.Key]) { + delete storage[params.Key] + return s3Resolve({}) + } + + return s3Reject(new S3Error('NotFound', 404)) + }) + + sinon.replace(s3, 'getObject', (params) => { + expect(params).to.have.property('Key').that.is.a('string') + + if (!params) { + throw new Error('No params passed to s3.getObject') + } + + if (typeof params === 'function') { + throw new Error('params passed to s3.getObject was a function') + } + + if (storage[params.Key]) { + return s3Resolve({ Body: storage[params.Key] }) + } + + return s3Reject(new S3Error('NotFound', 404)) + }) + + sinon.replace(s3, 'headBucket', (params) => { + expect(params).to.have.property('Bucket').that.is.a('string') + + if (!params) { + throw new Error('No params passed to s3.headBucket') + } + + if (typeof params === 'function') { + throw new Error('params passed to s3.headBucket was a function') + } + + return s3Resolve() + }) + + sinon.replace(s3, 'headObject', (params) => { + expect(params).to.have.property('Key').that.is.a('string') + + if (!params) { + throw new Error('No params passed to s3.headObject') + } + + if (typeof params === 'function') { + throw new Error('params passed to s3.headObject was a function') + } + + if (storage[params.Key]) { + return s3Resolve({}) + } + return s3Reject(new S3Error('NotFound', 404)) + }) + + 
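+  // Note (added for clarity): listObjectsV2 below backs prefix queries, e.g.
+  // ShardingDatastore listing keys under a prefix when it opens a shard.
+  // A minimal sketch of what the mock serves, assuming a key was written
+  // via the mocked `upload` first:
+  //
+  //   storage['/blocks/SHARDING'] = Buffer.from('/repo/flatfs/shard/v1/next-to-last/2')
+  //   s3.listObjectsV2({ Prefix: '/blocks' }).promise()
+  //     // resolves to { Contents: [{ Key: '/blocks/SHARDING' }] }
+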
sinon.replace(s3, 'listObjectsV2', (params) => { + expect(params).to.have.property('Prefix').that.is.a('string') + + if (!params) { + throw new Error('No params passed to s3.listObjectsV2') + } + + if (typeof params === 'function') { + throw new Error('params passed to s3.listObjectsV2 was a function') + } + + const results = { + /** @type {({ Key: string })[]} */ + Contents: [] + } + + for (const k in storage) { + if (k.startsWith(`${params.Prefix || ''}`)) { + results.Contents.push({ + Key: k + }) + } + } + + return s3Resolve(results) + }) + + sinon.replace(s3, 'upload', (params) => { + expect(params.Key).to.be.a('string') + expect(params.Body).to.be.instanceof(Buffer) + storage[params.Key] = params.Body + return s3Resolve({}) + }) +} diff --git a/packages/ipfs-repo-migrations/test/fixtures/pinset-basic.car b/packages/ipfs-repo-migrations/test/fixtures/pinset-basic.car new file mode 100644 index 00000000..5d94af94 Binary files /dev/null and b/packages/ipfs-repo-migrations/test/fixtures/pinset-basic.car differ diff --git a/packages/ipfs-repo-migrations/test/fixtures/pinset-multiple-buckets.car b/packages/ipfs-repo-migrations/test/fixtures/pinset-multiple-buckets.car new file mode 100644 index 00000000..423e6505 Binary files /dev/null and b/packages/ipfs-repo-migrations/test/fixtures/pinset-multiple-buckets.car differ diff --git a/packages/ipfs-repo-migrations/test/fixtures/repo.js b/packages/ipfs-repo-migrations/test/fixtures/repo.js new file mode 100644 index 00000000..b5e71b38 --- /dev/null +++ b/packages/ipfs-repo-migrations/test/fixtures/repo.js @@ -0,0 +1,43 @@ +'use strict' + +const loadFixture = require('aegir/utils/fixtures') +const { CONFIG_KEY, VERSION_KEY } = require('../../src/utils') + +/** + * @typedef {import('../../src/types').Backends} Backends + */ + +/** + * + * @param {(dir: string) => import('../../src/types').Backends} createBackends + * @param {*} prefix + * @returns + */ +async function createRepo (createBackends, prefix) { + const dir = `${prefix ? 
`${prefix}/` : ''}test-repo-for-${Date.now()}` + const backends = createBackends(dir) + + await backends.root.open() + await backends.root.close() + + return { + dir, + backends + } +} + +/** + * @param {Backends} backends + */ +async function initRepo (backends) { + const store = backends.root + await store.open() + await store.put(VERSION_KEY, loadFixture('test/fixtures/test-repo/version')) + await store.put(CONFIG_KEY, loadFixture('test/fixtures/test-repo/config')) + await store.close() +} + +module.exports = { + createRepo, + initRepo +} diff --git a/packages/ipfs-repo-migrations/test/fixtures/test-repo/config b/packages/ipfs-repo-migrations/test/fixtures/test-repo/config new file mode 100644 index 00000000..5a9765f5 --- /dev/null +++ b/packages/ipfs-repo-migrations/test/fixtures/test-repo/config @@ -0,0 +1,99 @@ +{ + "Identity": { + "PeerID": "QmQJPPKEd1a1zLrsDzmzKMnbkkNNmCziUMsXvvkLbjPg1c", + "PrivKey": "CAASqQkwggSlAgEAAoIBAQC6rWI46zRrseaaQq94Y14LJczstp0boYRr7vzg3jugJOqGhqzD+t88vmelYuyI0bJkoMI8WkfSBT0OYbmwszgzNFvYvZNPvfgzt2qLODU1/3PP0ihYcO42yAoX3KWlExuX1xi/nNaUvMPGF4oMucp6PN2o/A2uAv0jsnAeb8hJM4MNs5aQoI5gby2yxtM9mc78UkdV1MZ00IvOdrU2IViNwTVre/PwjbaR4RPcF40+/E2zVJG/z8wOMnpkKAkIxCSjJcaRcxR1bAfP/cKLbpxrpnQlHgVEBVhIaqkd3i5i1+dG8VgsD5LZPh/hLjldXiSYrNMO70Ksne6sc7nL/3UhAgMBAAECggEBAKqKe5FETz+T5vhGs8doACJvBie7LQxxImj4jE1iuVY0Y41Cu9LO/eKgkE+ncDAOYstLkidQ0wwrfPwGi33CPTWKP95iUpInGvkkN1G4p+QM2+CgPfuOMBIb7hyelzWXnz24ZAOpZN+9E52FI7k8gp3pvRcELfsq/9f8zDECLhewRjZitiY5ewfBKsK5LFKQSRg8DIVIKq4iqi7QMRRwbFDtDLcUHJepXSTRhmhWr5b/23O8OxnHExtjXMFqtBzvaAuZPnw2whr8ujV3W+5PyY+btx6M0k/dDQe2dFSJWm8AKLF8AL+houl2vtnFQ47yeYisWd02BcR91DyF5u6hxYECgYEA8b1UlUgCJ1h4MLTt0HOk+JwccJtyLoofe/y8/i4sMtjUZ4kiyIlEUrmPaE3qcNsUOG5eldz4nfN4AYRG4UQwmMFehpGC5pxH/kW8xwC9iFxDbkrNwJ8T5Jc8EQmFZ9BTXviQ3d5ViO06gtOiAdf2L/ci/qazCR7eowdjvbUso0MCgYEAxbCFZjj0L7+Zuyl6Znv7A1raJUKTGR8J5OsP6lIcRq0EfC6OcoCS6bx7FIbM0gkgGj+F/1l1bonfCcCDv5x4tRZzVUCz2KrDEwo/RCv1Y47ipuusao8fTTlO5tgESl4jAvaD1mfZom9BslO4sk3CxXJyAuMJUCc/8To6HLPclcsCgYEAwcuRknd+2oIPIKT7fcAoAfJMzm2f1PWAFgrgjDkBz+WKKlKDIxcAQf1mnSzRhJXtGOxKQYQ7f4zeqQCdPhxHhT6IBAOFWFDzWkIX+8V5LGci27l2RzSAYyJm0hW68CXPoHRO1r9V/QaJgLYey7GROJS8Zj/HiclInJPg/wXOejcCgYBjiwcg+fy7LldSjwg7IqK6hbzilitLsv1gb5yL+NyUGfrwLOE9dtCDMY0oQNhRypaHoQTwFzOyfWn5lx7AFSISzUM14mas6w9fPwGsahYeE4y9UF55KagxUnIQeyVt7QjkLQ0loRVNXYhPKykNX2p70aznFztRSPJBnXg1i7u/EQKBgQC8iZQF/5vwgVN3FgEugru9DKUOWAXMXWcMENRgQ9kiUNU7cwqszQyOk/zmNXrrIbdzn+4/H4gBNRw+dy0t1D5nG7+AuMn1J9WmRS0NF6eF43ttRRmERxiYwRssBs0JLaQWaobmEqcMNCygm1BCqQrKfmY2oI3HDOukMwgMpfZsSQ==" + }, + "Datastore": { + "Type": "leveldb", + "Path": "/Users/pedroteixeira/.ipfs/datastore", + "StorageMax": "10GB", + "StorageGCWatermark": 90, + "GCPeriod": "1h", + "Params": null, + "NoSync": false, + "HashOnRead": false, + "BloomFilterSize": 0 + }, + "Addresses": { + "Swarm": [ + "/ip4/0.0.0.0/tcp/4001", + "/ip6/::/tcp/4001" + ], + "API": "/ip4/127.0.0.1/tcp/5001", + "Gateway": "/ip4/127.0.0.1/tcp/8080" + }, + "Mounts": { + "IPFS": "/ipfs", + "IPNS": "/ipns", + "FuseAllowOther": false + }, + "Discovery": { + "MDNS": { + "Enabled": true, + "Interval": 10 + } + }, + "Ipns": { + "RepublishPeriod": "", + "RecordLifetime": "", + "ResolveCacheSize": 128 + }, + "Bootstrap": [ + "/ip4/104.131.131.82/tcp/4001/ipfs/QmaCpDMGvV2BGHeYERUEnRQAwe3N8SzbUtfsmvsqQLuvuJ", + "/ip4/104.236.176.52/tcp/4001/ipfs/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z", + "/ip4/104.236.179.241/tcp/4001/ipfs/QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1KrGM", + "/ip4/162.243.248.213/tcp/4001/ipfs/QmSoLueR4xBeUbY9WZ9xGUUxunbKWcrNFTDAadQJmocnWm", 
+ "/ip4/128.199.219.111/tcp/4001/ipfs/QmSoLSafTMBsPKadTEgaXctDQVcqN88CNLHXMkTNwMKPnu", + "/ip4/104.236.76.40/tcp/4001/ipfs/QmSoLV4Bbm51jM9C4gDYZQ9Cy3U6aXMJDAbzgu2fzaDs64", + "/ip4/178.62.158.247/tcp/4001/ipfs/QmSoLer265NRgSp2LA3dPaeykiS1J6DifTC88f5uVQKNAd", + "/ip4/178.62.61.185/tcp/4001/ipfs/QmSoLMeWqB7YGVLJN3pNLQpmmEk35v6wYtsMGLzSr5QBU3", + "/ip4/104.236.151.122/tcp/4001/ipfs/QmSoLju6m7xTh3DuokvT3886QRYqxAzb1kShaanJgW36yx", + "/ip6/2604:a880:1:20::1f9:9001/tcp/4001/ipfs/QmSoLnSGccFuZQJzRadHn95W2CrSFmZuTdDWP8HXaHca9z", + "/ip6/2604:a880:1:20::203:d001/tcp/4001/ipfs/QmSoLPppuBtQSGwKDZT2M73ULpjvfd3aZ6ha4oFGL1KrGM", + "/ip6/2604:a880:0:1010::23:d001/tcp/4001/ipfs/QmSoLueR4xBeUbY9WZ9xGUUxunbKWcrNFTDAadQJmocnWm", + "/ip6/2400:6180:0:d0::151:6001/tcp/4001/ipfs/QmSoLSafTMBsPKadTEgaXctDQVcqN88CNLHXMkTNwMKPnu", + "/ip6/2604:a880:800:10::4a:5001/tcp/4001/ipfs/QmSoLV4Bbm51jM9C4gDYZQ9Cy3U6aXMJDAbzgu2fzaDs64", + "/ip6/2a03:b0c0:0:1010::23:1001/tcp/4001/ipfs/QmSoLer265NRgSp2LA3dPaeykiS1J6DifTC88f5uVQKNAd", + "/ip6/2a03:b0c0:1:d0::e7:1/tcp/4001/ipfs/QmSoLMeWqB7YGVLJN3pNLQpmmEk35v6wYtsMGLzSr5QBU3", + "/ip6/2604:a880:1:20::1d9:6001/tcp/4001/ipfs/QmSoLju6m7xTh3DuokvT3886QRYqxAzb1kShaanJgW36yx" + ], + "Tour": { + "Last": "" + }, + "Gateway": { + "HTTPHeaders": { + "Access-Control-Allow-Headers": [ + "X-Requested-With", + "Range" + ], + "Access-Control-Allow-Methods": [ + "GET" + ], + "Access-Control-Allow-Origin": [ + "*" + ] + }, + "RootRedirect": "", + "Writable": false, + "PathPrefixes": [] + }, + "SupernodeRouting": { + "Servers": null + }, + "API": { + "HTTPHeaders": null + }, + "Swarm": { + "AddrFilters": null, + "DisableBandwidthMetrics": false, + "DisableNatPortMap": false + }, + "Reprovider": { + "Interval": "12h" + }, + "Experimental": { + "FilestoreEnabled": false, + "ShardingEnabled": false, + "Libp2pStreamMounting": false + } +} \ No newline at end of file diff --git a/packages/ipfs-repo-migrations/test/fixtures/test-repo/version b/packages/ipfs-repo-migrations/test/fixtures/test-repo/version new file mode 100644 index 00000000..d00491fd --- /dev/null +++ b/packages/ipfs-repo-migrations/test/fixtures/test-repo/version @@ -0,0 +1 @@ +1 diff --git a/packages/ipfs-repo-migrations/test/index.spec.js b/packages/ipfs-repo-migrations/test/index.spec.js new file mode 100644 index 00000000..6486f306 --- /dev/null +++ b/packages/ipfs-repo-migrations/test/index.spec.js @@ -0,0 +1,533 @@ +/* eslint-env mocha */ +'use strict' + +const { expect } = require('aegir/utils/chai') +const sinon = require('sinon') +const { MemoryBlockstore } = require('blockstore-core/memory') +const { MemoryDatastore } = require('datastore-core') + +const migrator = require('../src/index') +const repoVersion = require('../src/repo/version') +const repoInit = require('../src/repo/init') +const errors = require('../src/errors') + +/** + * @typedef {import('../src/types').Migration} Migration + * @typedef {import('../src/types').MigrationOptions} MigrationOptions + */ + +/** + * @returns {Migration[]} + */ +function createMigrations () { + return [ + { + version: 1, + description: '', + migrate: sinon.stub().resolves(), + revert: sinon.stub().resolves() + }, + { + version: 2, + description: '', + migrate: sinon.stub().resolves(), + revert: sinon.stub().resolves() + }, + { + version: 3, + description: '', + migrate: sinon.stub().resolves(), + revert: sinon.stub().resolves() + }, + { + version: 4, + description: '', + migrate: sinon.stub().resolves(), + revert: sinon.stub().resolves() + } + ] +} + +/** + * @returns {Required} + */ 
+function createOptions () {
+  return {
+    ignoreLock: false,
+    isDryRun: false,
+    onProgress: () => {},
+    migrations: createMigrations()
+  }
+}
+
+describe('index.js', () => {
+  /**
+   * @type {import('sinon').SinonStub}
+   */
+  let getVersionStub
+  /**
+   * @type {import('sinon').SinonStub}
+   */
+  let setVersionStub
+  /**
+   * @type {import('sinon').SinonStub}
+   */
+  let lockStub
+  /**
+   * @type {import('sinon').SinonStub}
+   */
+  let initStub
+  /**
+   * @type {import('sinon').SinonStub}
+   */
+  let lockCloseStub
+  const repoOptions = {
+    repoLock: {
+      locked: () => Promise.resolve(false),
+      lock: () => Promise.resolve({
+        close: () => Promise.resolve()
+      })
+    },
+    autoMigrate: true,
+    onMigrationProgress: () => {},
+    repoOwner: true
+  }
+
+  const backends = {
+    root: new MemoryDatastore(),
+    blocks: new MemoryBlockstore(),
+    datastore: new MemoryDatastore(),
+    keys: new MemoryDatastore(),
+    pins: new MemoryDatastore()
+  }
+
+  beforeEach(() => {
+    // Reset all stubs
+    sinon.reset()
+
+    initStub.resolves(true)
+    lockCloseStub.resolves()
+    lockStub.resolves({ close: lockCloseStub })
+  })
+
+  before(() => {
+    getVersionStub = sinon.stub(repoVersion, 'getVersion')
+    setVersionStub = sinon.stub(repoVersion, 'setVersion')
+    lockCloseStub = sinon.stub()
+    lockStub = sinon.stub(repoOptions.repoLock, 'lock')
+    initStub = sinon.stub(repoInit, 'isRepoInitialized')
+  })
+
+  after(() => {
+    getVersionStub.restore()
+    setVersionStub.restore()
+    lockStub.restore()
+    initStub.restore()
+  })
+
+  it('get version of the latest migration', () => {
+    const migrationsMock = createMigrations()
+
+    expect(migrator.getLatestMigrationVersion(migrationsMock)).to.equal(4)
+    expect(migrator.getLatestMigrationVersion([])).to.equal(0)
+  })
+
+  describe('revert', () => {
+    it('should error without path argument', () => {
+      const options = createOptions()
+
+      // @ts-expect-error invalid params
+      return expect(migrator.revert(undefined, undefined, undefined, undefined, options))
+        .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code)
+    })
+
+    it('should error without backends argument', () => {
+      const options = createOptions()
+
+      // @ts-expect-error invalid params
+      return expect(migrator.revert('/some/path', undefined, undefined, undefined, options))
+        .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code)
+    })
+
+    it('should error without repo options argument', () => {
+      const options = createOptions()
+
+      // @ts-expect-error invalid params
+      return expect(migrator.revert('/some/path', backends, undefined, undefined, options))
+        .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code)
+    })
+
+    it('should error without toVersion argument', () => {
+      const options = createOptions()
+
+      // @ts-expect-error invalid params
+      return expect(migrator.revert('/some/path', backends, {}, undefined, options))
+        .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code)
+    })
+
+    it('should error with invalid toVersion argument', () => {
+      const invalidValues = ['eight', '-1', '1', -1]
+      const options = createOptions()
+
+      return Promise.all(
+        // @ts-expect-error invalid params
+        invalidValues.map((value) => expect(migrator.revert('/some/path', backends, repoOptions, value, options))
+          .to.eventually.be.rejectedWith(errors.InvalidValueError).with.property('code', errors.InvalidValueError.code))
+      )
+    })
+
+    it('should not revert if current repo version and toVersion match', async () => {
+      getVersionStub.returns(2)
+      const options = createOptions()
+
+      await expect(migrator.revert('/some/path', backends, repoOptions, 2, options))
+        .to.eventually.be.fulfilled()
+
+      expect(lockStub).to.have.property('called', false)
+    })
+
+    it('should not revert if current repo version is lower than toVersion', async () => {
+      getVersionStub.returns(2)
+      const options = createOptions()
+
+      await expect(migrator.revert('/some/path', backends, repoOptions, 3, options))
+        .to.eventually.be.rejectedWith(errors.InvalidValueError).with.property('code', errors.InvalidValueError.code)
+
+      expect(lockStub).to.have.property('called', false)
+    })
+
+    it('should not allow reverting a migration that is not reversible', () => {
+      const nonReversibleMigrationsMock = createMigrations()
+      // @ts-expect-error invalid params
+      nonReversibleMigrationsMock[2].revert = undefined
+      const options = { migrations: nonReversibleMigrationsMock }
+
+      getVersionStub.returns(4)
+      return expect(
+        migrator.revert('/some/path', backends, repoOptions, 1, options)
+      ).to.eventually.be.rejectedWith(errors.NonReversibleMigrationError)
+        .with.property('code', errors.NonReversibleMigrationError.code)
+    })
+
+    it('should revert expected migrations', async () => {
+      const options = createOptions()
+      getVersionStub.returns(3)
+
+      await expect(migrator.revert('/some/path', backends, repoOptions, 1, options))
+        .to.eventually.be.fulfilled()
+
+      expect(lockCloseStub).to.have.property('calledOnce', true)
+      expect(lockStub).to.have.property('calledOnce', true)
+      expect(setVersionStub.calledOnceWith(1, backends)).to.be.true()
+
+      // Checking migrations
+      expect(options.migrations[3].revert).to.have.property('called', false)
+      expect(options.migrations[2].revert).to.have.property('calledOnce', true)
+      expect(options.migrations[1].revert).to.have.property('calledOnce', true)
+      expect(options.migrations[0].revert).to.have.property('called', false)
+    })
+
+    it('should revert one migration as expected', async () => {
+      const options = createOptions()
+      getVersionStub.returns(2)
+
+      await expect(migrator.revert('/some/path', backends, repoOptions, 1, options))
+        .to.eventually.be.fulfilled()
+
+      expect(lockCloseStub).to.have.property('calledOnce', true)
+      expect(lockStub).to.have.property('calledOnce', true)
+      expect(setVersionStub.calledOnceWith(1, backends)).to.be.true()
+
+      // Checking migrations
+      expect(options.migrations[3].revert).to.have.property('called', false)
+      expect(options.migrations[2].revert).to.have.property('called', false)
+      expect(options.migrations[1].revert).to.have.property('calledOnce', true)
+      expect(options.migrations[0].revert).to.have.property('called', false)
+    })
+
+    it('should revert with only one migration', async () => {
+      const migrationsMock = [
+        {
+          version: 2,
+          description: '',
+          reversible: true,
+          migrate: sinon.stub().resolves(),
+          revert: sinon.stub().resolves()
+        }
+      ]
+      const options = { migrations: migrationsMock }
+      getVersionStub.returns(2)
+
+      await expect(migrator.revert('/some/path', backends, repoOptions, 1, options))
+        .to.eventually.be.fulfilled()
+
+      expect(lockCloseStub).to.have.property('calledOnce', true)
+      expect(lockStub).to.have.property('calledOnce', true)
+      expect(setVersionStub.calledOnceWith(1, backends)).to.be.true()
+
+      // Checking migrations
+      expect(migrationsMock[0].revert).to.have.property('calledOnce', true)
+    })
+
+    it('should not have any side-effects when in dry run', async () => {
+      const options = createOptions()
+      getVersionStub.returns(4)
+      options.isDryRun = true
+
+      await expect(migrator.revert('/some/path', backends, repoOptions, 2, options))
+        .to.eventually.be.fulfilled()
+
+      expect(lockCloseStub).to.have.property('called', false)
+      expect(lockStub).to.have.property('called', false)
+      expect(setVersionStub).to.have.property('called', false)
+
+      return options.migrations.forEach(({ revert }) => expect(revert).to.have.property('calledOnce', false))
+    })
+
+    it('should not lock repo when ignoreLock is used', async () => {
+      const options = createOptions()
+      options.ignoreLock = true
+
+      getVersionStub.returns(4)
+
+      await expect(migrator.revert('/some/path', backends, repoOptions, 2, options))
+        .to.eventually.be.fulfilled()
+
+      expect(lockCloseStub).to.have.property('called', false)
+      expect(lockStub).to.have.property('called', false)
+      expect(setVersionStub.calledOnceWith(2, backends)).to.be.true()
+
+      // Checking migrations
+      expect(options.migrations[3].revert).to.have.property('calledOnce', true)
+      expect(options.migrations[2].revert).to.have.property('calledOnce', true)
+      expect(options.migrations[1].revert).to.have.property('called', false)
+      expect(options.migrations[0].revert).to.have.property('called', false)
+    })
+
+    it('should report progress when progress callback is supplied', async () => {
+      const options = createOptions()
+      const onProgressStub = sinon.stub()
+      options.onProgress = onProgressStub
+      getVersionStub.returns(4)
+
+      options.migrations[2].revert = async (backends, onProgress) => {
+        onProgress(50, 'hello')
+      }
+
+      await expect(migrator.revert('/some/path', backends, repoOptions, 2, options))
+        .to.eventually.be.fulfilled()
+
+      expect(onProgressStub.getCall(0).calledWith(3, '50.00', 'hello')).to.be.true()
+    })
+
+    it('should unlock repo when error is thrown', async () => {
+      getVersionStub.returns(4)
+      const options = createOptions()
+      options.migrations[2].revert = sinon.stub().rejects()
+
+      await expect(migrator.revert('/some/path', backends, repoOptions, 2, options))
+        .to.eventually.be.rejected()
+
+      expect(lockCloseStub).to.have.property('calledOnce', true)
+      expect(lockStub).to.have.property('calledOnce', true)
+
+      // The last successfully reverted migration should be set as repo's version
+      expect(setVersionStub.calledOnceWith(3, backends)).to.be.true()
+    })
+  })
+
+  describe('migrate', () => {
+    it('should error without path argument', () => {
+      const options = createOptions()
+
+      // @ts-expect-error invalid params
+      return expect(migrator.migrate(undefined, undefined, undefined, undefined, options))
+        .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code)
+    })
+
+    it('should error without backends argument', () => {
+      const options = createOptions()
+
+      // @ts-expect-error invalid params
+      return expect(migrator.migrate('/some/path', undefined, undefined, undefined, options))
+        .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code)
+    })
+
+    it('should error without repoOptions argument', () => {
+      const options = createOptions()
+
+      // @ts-expect-error invalid params
+      return expect(migrator.migrate('/some/path', backends, undefined, undefined, options))
+        .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code)
+    })
+
+    it('should error without toVersion argument', () => {
+      const options = createOptions()
+
+      // @ts-expect-error invalid params
+      return expect(migrator.migrate('/some/path', backends, repoOptions, undefined, options))
+        .to.eventually.be.rejectedWith(errors.RequiredParameterError).with.property('code', errors.RequiredParameterError.code)
+    })
+
+    it('should error with invalid toVersion argument', () => {
+      const invalidValues = ['eight', '-1', '1', -1, {}]
+
+      return Promise.all(
+        // @ts-expect-error invalid params
+        invalidValues.map((invalidValue) => expect(migrator.migrate('/some/path', backends, repoOptions, invalidValue, createOptions()))
+          .to.eventually.be.rejectedWith(errors.InvalidValueError).with.property('code', errors.InvalidValueError.code))
+      )
+    })
+
+    it('should verify that all migrations are available', () => {
+      const options = {
+        migrations: [
+          {
+            version: 3,
+            description: '',
+            migrate: sinon.stub().resolves(),
+            revert: sinon.stub().resolves()
+          },
+          {
+            version: 4,
+            description: '',
+            migrate: sinon.stub().resolves(),
+            revert: sinon.stub().resolves()
+          }
+        ]
+      }
+
+      getVersionStub.returns(1)
+
+      return expect(migrator.migrate('/some/path', backends, repoOptions, 3, options))
+        .to.eventually.be.rejectedWith(errors.InvalidValueError).with.property('code', errors.InvalidValueError.code)
+    })
+
+    it('should verify that migrations up to toVersion are available', () => {
+      const options = {
+        migrations: [
+          {
+            version: 3,
+            description: '',
+            migrate: sinon.stub().resolves(),
+            revert: sinon.stub().resolves()
+          },
+          {
+            version: 4,
+            description: '',
+            migrate: sinon.stub().resolves(),
+            revert: sinon.stub().resolves()
+          }
+        ]
+      }
+
+      getVersionStub.returns(3)
+
+      return expect(migrator.migrate('/some/path', backends, repoOptions, 5, options))
+        .to.eventually.be.rejectedWith(errors.InvalidValueError).with.property('code', errors.InvalidValueError.code)
+    })
+
+    it('should not migrate if current repo version and toVersion match', async () => {
+      getVersionStub.returns(2)
+      const options = createOptions()
+
+      await expect(migrator.migrate('/some/path', backends, repoOptions, 2, options))
+        .to.eventually.be.fulfilled()
+
+      expect(lockStub).to.have.property('called', false)
+    })
+
+    it('should not migrate if current repo version is higher than toVersion', async () => {
+      getVersionStub.returns(3)
+      const options = createOptions()
+
+      await expect(migrator.migrate('/some/path', backends, repoOptions, 2, options))
+        .to.eventually.be.rejectedWith(errors.InvalidValueError).with.property('code', errors.InvalidValueError.code)
+
+      expect(lockStub).to.have.property('called', false)
+    })
+
+    it('should migrate expected migrations', async () => {
+      const options = createOptions()
+      getVersionStub.returns(1)
+
+      await expect(migrator.migrate('/some/path', backends, repoOptions, 3, options))
+        .to.eventually.be.fulfilled()
+
+      expect(lockCloseStub).to.have.property('calledOnce', true)
+      expect(lockStub).to.have.property('calledOnce', true)
+      expect(setVersionStub.calledOnceWith(3, backends)).to.be.true()
+
+      // Checking migrations
+      expect(options.migrations[3].migrate).to.have.property('called', false)
+      expect(options.migrations[2].migrate).to.have.property('calledOnce', true)
+      expect(options.migrations[1].migrate).to.have.property('calledOnce', true)
+      expect(options.migrations[0].migrate).to.have.property('called', false)
+    })
+
+    it('should not have any side-effects when in dry run', async () => {
+      const options = createOptions()
+      options.isDryRun = true
+      getVersionStub.returns(2)
+
+      await expect(migrator.migrate('/some/path', backends,
repoOptions, 4, options)) + .to.eventually.be.fulfilled() + + expect(lockCloseStub).to.have.property('called', false) + expect(lockStub).to.have.property('called', false) + expect(setVersionStub).to.have.property('called', false) + + return options.migrations.forEach(({ migrate }) => expect(migrate).to.have.property('calledOnce', false)) + }) + + it('should not lock repo when ignoreLock is used', async () => { + const options = createOptions() + options.ignoreLock = true + getVersionStub.returns(2) + + await expect(migrator.migrate('/some/path', backends, repoOptions, 4, options)) + .to.eventually.be.fulfilled() + + expect(lockCloseStub).to.have.property('called', false) + expect(lockStub).to.have.property('called', false) + expect(setVersionStub.calledOnceWith(4, backends)).to.be.true() + + // Checking migrations + expect(options.migrations[3].migrate).to.have.property('calledOnce', true) + expect(options.migrations[2].migrate).to.have.property('calledOnce', true) + expect(options.migrations[1].migrate).to.have.property('called', false) + expect(options.migrations[0].migrate).to.have.property('called', false) + }) + + it('should report progress when progress callback is supplied', async () => { + const options = createOptions() + const onProgressStub = sinon.stub() + options.onProgress = onProgressStub + getVersionStub.returns(2) + + options.migrations[2].migrate = async (backends, onProgress) => { + onProgress(50, 'hello') + } + + await expect(migrator.migrate('/some/path', backends, repoOptions, 4, options)) + .to.eventually.be.fulfilled() + + expect(onProgressStub.getCall(0).calledWith(3, '50.00', 'hello')).to.be.true() + }) + + it('should unlock repo when error is thrown', async () => { + getVersionStub.returns(2) + const options = createOptions() + options.migrations[3].migrate = sinon.stub().rejects() + + await expect(migrator.migrate('/some/path', backends, repoOptions, 4, options)) + .to.eventually.be.rejected() + + expect(lockCloseStub).to.have.property('calledOnce', true) + expect(lockStub).to.have.property('calledOnce', true) + + // The last successfully migrated migration should be set as repo's version + expect(setVersionStub.calledOnceWith(3, backends)).to.be.true() + }) + }) +}) diff --git a/packages/ipfs-repo-migrations/test/init-test.js b/packages/ipfs-repo-migrations/test/init-test.js new file mode 100644 index 00000000..00688c95 --- /dev/null +++ b/packages/ipfs-repo-migrations/test/init-test.js @@ -0,0 +1,57 @@ +/* eslint-env mocha */ +'use strict' + +const { expect } = require('aegir/utils/chai') +const { CONFIG_KEY, VERSION_KEY } = require('../src/utils') +const repoInit = require('../src/repo/init') +const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string') + +/** + * @param {import('./types').SetupFunction} setup + * @param {import('./types').CleanupFunction} cleanup + */ +module.exports = (setup, cleanup) => { + /** @type {string} */ + let dir + /** @type {import('../src/types').Backends} */ + let backends + + beforeEach(async () => { + ({ dir, backends } = await setup()) + }) + afterEach(() => + cleanup(dir) + ) + + it('should return true with valid initialized repo', async () => { + const store = backends.root + await store.open() + await store.put(VERSION_KEY, uint8ArrayFromString('7')) + await store.put(CONFIG_KEY, uint8ArrayFromString('config')) + await store.close() + + expect(await repoInit.isRepoInitialized(backends)).to.be.true() + }) + + it('should return false with missing version key', async () => { + const store = backends.root + 
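+    // deliberately write only the config key: the absent version key is what
+    // should make isRepoInitialized() report false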
await store.open()
+    await store.put(CONFIG_KEY, uint8ArrayFromString(''))
+    await store.close()
+
+    expect(await repoInit.isRepoInitialized(backends)).to.be.false()
+  })
+
+  it('should return false with missing config key', async () => {
+    const store = backends.root
+    await store.open()
+    await store.put(VERSION_KEY, uint8ArrayFromString(''))
+    await store.close()
+
+    expect(await repoInit.isRepoInitialized(backends)).to.be.false()
+  })
+
+  it('should return false if the repo does not exist', async () => {
+    return expect(await repoInit.isRepoInitialized(backends)).to.be.false()
+  })
+}
diff --git a/packages/ipfs-repo-migrations/test/integration-test.js b/packages/ipfs-repo-migrations/test/integration-test.js
new file mode 100644
index 00000000..eca91921
--- /dev/null
+++ b/packages/ipfs-repo-migrations/test/integration-test.js
@@ -0,0 +1,77 @@
+/* eslint-env mocha */
+'use strict'
+
+const { expect } = require('aegir/utils/chai')
+
+const migrator = require('../src')
+const migrations = require('./test-migrations')
+const { VERSION_KEY, CONFIG_KEY } = require('../src/utils')
+const { initRepo } = require('./fixtures/repo')
+
+/**
+ * @param {import('./types').SetupFunction} setup
+ * @param {import('./types').CleanupFunction} cleanup
+ */
+module.exports = (setup, cleanup) => {
+  /** @type {string} */
+  let dir
+  /** @type {import('../src/types').Backends} */
+  let backends
+  const repoOptions = {
+    repoLock: {
+      locked: () => Promise.resolve(false),
+      lock: () => Promise.resolve({
+        close: () => Promise.resolve()
+      })
+    },
+    autoMigrate: true,
+    onMigrationProgress: () => {},
+    repoOwner: true
+  }
+
+  beforeEach(async () => {
+    ({ dir, backends } = await setup())
+    await initRepo(backends)
+  })
+
+  afterEach(() => cleanup(dir))
+
+  it('migrate forward', async () => {
+    await migrator.migrate(dir, backends, repoOptions, migrator.getLatestMigrationVersion(migrations), {
+      migrations: migrations,
+      onProgress: () => {}
+    })
+
+    const store = backends.root
+    await store.open()
+    const version = await store.get(VERSION_KEY)
+    expect(version.toString()).to.be.equal('2')
+
+    const config = await store.get(CONFIG_KEY)
+    expect(config.toString()).to.include(migrations[0].newApiAddr)
+
+    await store.close()
+  })
+
+  it('revert', async () => {
+    await migrator.migrate(dir, backends, repoOptions, migrator.getLatestMigrationVersion(migrations), {
+      migrations: migrations,
+      onProgress: () => {}
+    })
+
+    await migrator.revert(dir, backends, repoOptions, 1, {
+      migrations: migrations,
+      onProgress: () => {}
+    })
+
+    const store = backends.root
+    await store.open()
+    const version = await store.get(VERSION_KEY)
+    expect(version.toString()).to.be.equal('1')
+
+    const config = await store.get(CONFIG_KEY)
+    expect(config.toString()).to.not.include(migrations[0].newApiAddr)
+
+    await store.close()
+  })
+}
diff --git a/packages/ipfs-repo-migrations/test/migrations/index.js b/packages/ipfs-repo-migrations/test/migrations/index.js
new file mode 100644
index 00000000..a6f93555
--- /dev/null
+++ b/packages/ipfs-repo-migrations/test/migrations/index.js
@@ -0,0 +1,12 @@
+'use strict'
+
+/**
+ * @param {import('../types').SetupFunction} setup
+ * @param {import('../types').CleanupFunction} cleanup
+ */
+module.exports = (setup, cleanup) => {
+  require('./migration-8-test')(setup, cleanup)
+  require('./migration-9-test')(setup, cleanup)
+  require('./migration-10-test')(setup, cleanup)
+  require('./migration-11-test')(setup, cleanup)
+}
diff --git
a/packages/ipfs-repo-migrations/test/migrations/migration-10-test.js b/packages/ipfs-repo-migrations/test/migrations/migration-10-test.js new file mode 100644 index 00000000..391320b8 --- /dev/null +++ b/packages/ipfs-repo-migrations/test/migrations/migration-10-test.js @@ -0,0 +1,184 @@ +/* eslint-env mocha */ +/* eslint-disable max-nested-callbacks */ +'use strict' + +const { expect } = require('aegir/utils/chai') +const { CID } = require('multiformats/cid') +const { BaseBlockstore } = require('blockstore-core/base') + +const migration = require('../../migrations/migration-10') +const Key = require('interface-datastore').Key +const { fromString } = require('uint8arrays/from-string') +const { equals } = require('uint8arrays/equals') +// @ts-expect-error no types +const Level5 = require('level-5') +// @ts-expect-error no types +const Level6 = require('level-6') + +/** + * @typedef {import('../../src/types').Backends} Backends + * @typedef {import('interface-datastore').Datastore} Datastore + */ + +/** + * @type {Record} + */ +const keys = { + CIQCKN76QUQUGYCHIKGFE6V6P3GJ2W26YFFPQW6YXV7NFHH3QB2RI3I: fromString('hello'), + CIQKKLBWAIBQZOIS5X7E32LQAL6236OUKZTMHPQSFIXPWXNZHQOV7JQ: fromString('derp') +} + +/** + * @param {Datastore} store + */ +async function bootstrap (store) { + await store.open() + + for (const name of Object.keys(keys)) { + if (store instanceof BaseBlockstore) { + await store.put(CID.parse(`b${name.toLowerCase()}`), keys[name]) + } else { + await store.put(new Key(name), keys[name]) + } + } + + await store.close() +} + +/** + * @param {Datastore} store + */ +async function validate (store) { + await store.open() + + for (const name of Object.keys(keys)) { + if (store instanceof BaseBlockstore) { + const key = CID.parse(`b${name.toLowerCase()}`) + + expect(await store.has(key)).to.be.true(`Could not read key ${name}`) + expect(equals(await store.get(key), keys[name])).to.be.true(`Could not read value for key ${keys[name]}`) + } else { + const key = new Key(`/${name}`) + + await expect(store.has(key)).to.eventually.be.true(`Could not read key ${name}`) + expect(equals(await store.get(key), keys[name])).to.be.true(`Could not read value for key ${keys[name]}`) + } + } + + await store.close() +} + +/** + * @param {Backends} backends + * @param {*} LevelImpl + * @returns {Backends} + */ +function withLevels (backends, LevelImpl) { + const output = { + ...backends + } + + Object.entries(backends) + .forEach(([key, value]) => { + // @ts-ignore it's ok + output[key] = withLevel(value, LevelImpl) + }) + + return output +} + +/** + * @param {Datastore} store + * @param {*} LevelImpl + */ +function withLevel (store, LevelImpl) { + let parent = { + child: store + } + + while (parent.child) { + if (parent.child.constructor.name === 'LevelDatastore') { + // @ts-ignore undocumented properties + parent.child.database = LevelImpl + // @ts-ignore undocumented properties + delete parent.child.db + + return store + } + + // @ts-ignore undocumented properties + parent = parent.child + } + + return store +} + +/** + * @param {import('../types').SetupFunction} setup + * @param {import('../types').CleanupFunction} cleanup + */ +module.exports = (setup, cleanup) => { + describe('migration 10', function () { + this.timeout(1024 * 1000) + /** @type {string} */ + let dir + /** @type {import('../../src/types').Backends} */ + let backends + + beforeEach(async () => { + ({ dir, backends } = await setup()) + }) + + afterEach(async () => { + await cleanup(dir) + }) + + describe('forwards', () => { + 
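+      // seed every backend with records written by level@5 so the tests below
+      // exercise the real level@5 to level@6 upgrade path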
beforeEach(async () => { + for (const backend of Object.values(backends)) { + await bootstrap(withLevel(backend, Level5)) + } + }) + + it('should migrate keys and values forward', async () => { + await migration.migrate(withLevels(backends, Level6), () => {}) + + for (const backend of Object.values(backends)) { + await validate(withLevel(backend, Level6)) + } + }) + }) + + describe('backwards using level@6.x.x', () => { + beforeEach(async () => { + for (const backend of Object.values(backends)) { + await bootstrap(withLevel(backend, Level6)) + } + }) + + it('should migrate keys and values backward', async () => { + await migration.revert(withLevels(backends, Level6), () => {}) + + for (const backend of Object.values(backends)) { + await validate(withLevel(backend, Level5)) + } + }) + }) + + describe('backwards using level@5.x.x', () => { + beforeEach(async () => { + for (const backend of Object.values(backends)) { + await bootstrap(withLevel(backend, Level6)) + } + }) + + it('should migrate keys and values backward', async () => { + await migration.revert(withLevels(backends, Level5), () => {}) + + for (const backend of Object.values(backends)) { + await validate(withLevel(backend, Level5)) + } + }) + }) + }) +} diff --git a/packages/ipfs-repo-migrations/test/migrations/migration-11-test.js b/packages/ipfs-repo-migrations/test/migrations/migration-11-test.js new file mode 100644 index 00000000..baeff1c7 --- /dev/null +++ b/packages/ipfs-repo-migrations/test/migrations/migration-11-test.js @@ -0,0 +1,75 @@ +/* eslint-env mocha */ +/* eslint-disable max-nested-callbacks */ +'use strict' + +const { expect } = require('aegir/utils/chai') +const { CID } = require('multiformats/cid') +const migration = require('../../migrations/migration-11') +const { Key } = require('interface-datastore') + +const MFS_ROOT_KEY = new Key('/local/filesroot') +const MFS_ROOT = CID.parse('Qmc42sn2WBHYeAShU3nx8mYkhKVq4sRLapawTaGh4XH4iE') + +/** + * @param {import('../types').SetupFunction} setup + * @param {import('../types').CleanupFunction} cleanup + */ +module.exports = (setup, cleanup) => { + describe('migration 11', function () { + this.timeout(1024 * 1000) + /** @type {string} */ + let dir + /** @type {import('../../src/types').Backends} */ + let backends + + beforeEach(async () => { + ({ dir, backends } = await setup()) + }) + + afterEach(async () => { + await cleanup(dir) + }) + + describe('forwards', () => { + beforeEach(async () => { + await backends.root.open() + await backends.root.put(MFS_ROOT_KEY, MFS_ROOT.bytes) + await backends.root.close() + }) + + it('should migrate MFS root forward', async () => { + await migration.migrate(backends, () => {}) + + await backends.root.open() + await backends.datastore.open() + + await expect(backends.root.has(MFS_ROOT_KEY)).to.eventually.be.false() + await expect(backends.datastore.has(MFS_ROOT_KEY)).to.eventually.be.true() + + await backends.datastore.close() + await backends.root.close() + }) + }) + + describe('backwards', () => { + beforeEach(async () => { + await backends.datastore.open() + await backends.datastore.put(MFS_ROOT_KEY, MFS_ROOT.bytes) + await backends.datastore.close() + }) + + it('should migrate MFS root backward', async () => { + await migration.revert(backends, () => {}) + + await backends.root.open() + await backends.datastore.open() + + await expect(backends.root.has(MFS_ROOT_KEY)).to.eventually.be.true() + await expect(backends.datastore.has(MFS_ROOT_KEY)).to.eventually.be.false() + + await backends.datastore.close() + await 
backends.root.close() + }) + }) + }) +} diff --git a/packages/ipfs-repo-migrations/test/migrations/migration-8-test.js b/packages/ipfs-repo-migrations/test/migrations/migration-8-test.js new file mode 100644 index 00000000..c816e3ce --- /dev/null +++ b/packages/ipfs-repo-migrations/test/migrations/migration-8-test.js @@ -0,0 +1,160 @@ +/* eslint-env mocha */ +/* eslint-disable max-nested-callbacks */ +'use strict' + +const { expect } = require('aegir/utils/chai') +const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string') + +const migration = require('../../migrations/migration-8') +const Key = require('interface-datastore').Key + +/** + * @typedef {import('../../src/types').Backends} Backends + * @typedef {import('interface-datastore').Datastore} Datastore + */ + +const blocksFixtures = [ + ['AFKREIBFG77IKIKDMBDUFDCSPK7H5TE5LNPMCSXYLPML27WSTT5YA5IUNU', + 'CIQCKN76QUQUGYCHIKGFE6V6P3GJ2W26YFFPQW6YXV7NFHH3QB2RI3I'], + ['AFKREIGKJES3Y4374YQUBLUYHT2R74JHR6VJP6RYXPYO6QB2WXIYQ32764', + 'CIQMUSJFXRZX7ZRBICXJQPHVD7YSPD5KS75DRO7Q55ADVNORRBXV75Y'], + ['AFKREIFFFQ3AEAYMXEJO37SN5FYAF7NN7HKFMZWDXYJCULX3LW4TYHK7UY', + 'CIQKKLBWAIBQZOIS5X7E32LQAL6236OUKZTMHPQSFIXPWXNZHQOV7JQ'], + ['AFKREIFAP4QO4GPFHESNT2X4WYPBOJU2ZKTCZ3LHJTX7TLM4DB2H6TUCL4', + 'CIQKA7ZA5YM6KOJE3HVPZNQ6C4TJVSVGFTWWOTHP7GWZYGDUP5HIEXY'], + ['AFKREIESTIYDYOO2RIFWPQEWS5DC62D2ADDDRPFVQD7K4BSFFYGB6IFUF4', + 'CIQJFGRQHQ45VCQLM7AJNF2GF5UHUAGGHC6LLAH6VYDEKLQMD4QLILY'], + ['AFKREIH3OGADJ2GSATMRCC42P3QEV5AWEDTKQ64Y5EHFHJJYWFUFHQ7FOU', + 'CIQPW4MAGTUNEBGZCEFZU7XAJL2BMIHGVB5ZR2IOKOSTRMLIKPB6K5I'], + ['AFKREIEQYB5HPFOBDE2RBJUW2H67YDY6JFD476HEEJQQTFXGBHN4XF3FTA', + 'CIQJBQD2O6K4CGJVCCTJNUP57QHR4SKHZ74OIITBBGLOMCO3ZOLWLGA'], + ['AFKREIHFQYMZMQHBUTDD7I4MKQ2LTZZNZGOSHI4R2PNV4HSC2ACSOJAWOE', + 'CIQOLBQZSZAODJGGH6RYYVBUXHTS3SM5EORZDU63LYPEFUAFE4SBM4I'], + ['AFKREIGBTJ4X7IP5LEGNFZNUFUOPL4SG4KNZC2COF6DUAS4B3Q2FY6SWUA', + 'CIQMDGTZP6Q72WIM2LS3ILI46XZENYU3SFUE4L4HIBFYDXBULR5FNIA'], + ['AFKREIAZ6G7XZOUM6UDNMXKDFCVNPUCN53FDEWR3NJ4NVZAWYN6I7HKXHI', + 'CIQBT4N7PS5IZ5IG2ZOUGKFK27IE33WKGJNDW2TY3LSBNQ34R6OVOOQ'], + ['AFKREIGIHVSB2NLGZG75SIIPJQ4A6GGOXZXUPVT7YDYHAUC4CDJMALWHUA', + 'CIQMQPLEDU2WNSN73EQQ6TBYB4MM5PTPI7LH7QHQOBIFYEGSYAXMPIA'], + ['AFKREIA6C72VFIB3TWL37LMIMZYY6YKVX72QYMAMLNMOQQHXHDJ3OT5HUY', + 'CIQB4F7VKKQDXHMXX6WYQZTRR5QVLP7VBQYAYW2Y5BAPOOGTW5H2PJQ'], + ['AFKREIES5CPHHXCMMCKT4GF6KB6QNY6IIQKQUPW2T5XV7OB5UERLI5JH2I', + 'CIQJF2E6OPOEYYEVHYML4UD5A3R4QRAVBI7NVH3PL64D3IJCWR2SPUQ'], + ['AFKREIGVTCG43WY2GKD44DHXV4W4XGDT42UKP7FRB6TC6R3CXSDCFHBYHU', + 'CIQNLGENZXNRUMUHZYGPPLZNZOMHHZVIU76LCD5GF5DWFPEGEKODQPI'], + ['AFKREIHDWDCEFGH4DQKJV67UZCMW7OJEE6XEDZDETOJUZJEVTENXQUVYKU', + 'CIQOHMGEIKMPYHAUTL57JSEZN64SIJ5OIHSGJG4TJSSJLGI3PBJLQVI'], + ['AFKREIASB5VPMAOUNYILFUXBD3LRYVOSL4YEFQRFAHSB2ESG46Q6TU6Y5Q', + 'CIQBED3K6YA5I3QQWLJOCHWXDRK5EXZQILBCKAPEDUJENZ5B5HJ5R3A'], + ['AFKREIHS5RDRQ6TIO4QUJHK5DLKWFQEX2SKGBWCVE4SPRGMSJHPJ4CQJNE', + 'CIQPF3CHDB5GQ5ZBISOV2GWVMLAJPVEUMDMFKJZE7CMZESO6TYFAS2I'], + ['AFKREIF2TF6KORYF3E2OSMSMQFW5CAJP6TRVIDP3ELHUTFJH2BENVSDZDE', + 'CIQLVGL4U5DQLWJU5EZEZALN2EAS75HDKQG7WIWPJGKSPUCI3LEHSGI'], + ['AFKREIG5MWZ56SM6LQUCMGCQISWAUCEBBEHPJKZAGIA722CPF3PYG353DI', + 'CIQN2ZNT35EZ4XBIEYMFARFMBIEICCIO6SVSAMQB7VUE6LW7QNX3WGQ'], + ['AFKREICZSSCDSBS7FFQZ55ASQDF3SMV6KLCW3GOFSZVWLYARCI47BGF354', + 'CIQFTFEEHEDF6KLBT32BFAGLXEZL4UWFNWM4LFTLMXQBCERZ6CMLX3Y'] +] + +/** + * @param {*} blockstore + * @returns {Datastore} + */ +function unwrap (blockstore) { + if (blockstore.child) { + return unwrap(blockstore.child) + } + + return blockstore +} + +/** 
+ * @param {Backends} backends + * @param {boolean} encoded + */ +async function bootstrapBlocks (backends, encoded) { + const store = backends.blocks + await store.open() + + const datastore = unwrap(store) + + for (const blocksNames of blocksFixtures) { + const name = encoded ? blocksNames[1] : blocksNames[0] + + await datastore.put(new Key(`/${name}`), uint8ArrayFromString('')) + } + + await store.close() +} + +/** + * @param {Backends} backends + * @param {boolean} migrated + */ +async function validateBlocks (backends, migrated) { + const store = backends.blocks + await store.open() + + const datastore = unwrap(store) + + for (const blockNames of blocksFixtures) { + const newName = migrated ? blockNames[1] : blockNames[0] + const oldName = migrated ? blockNames[0] : blockNames[1] + + const oldKey = new Key(`/${oldName}`) + const newKey = new Key(`/${newName}`) + + expect(await datastore.has(oldKey)).to.be.false(`${oldName} was not migrated to ${newName}`) + expect(await datastore.has(newKey)).to.be.true(`${newName} was not removed`) + } + + await store.close() +} + +/** + * @param {import('../types').SetupFunction} setup + * @param {import('../types').CleanupFunction} cleanup + */ +module.exports = (setup, cleanup) => { + describe('migration 8', function () { + this.timeout(1024 * 1000) + /** @type {string} */ + let dir + /** @type {import('../../src/types').Backends} */ + let backends + + beforeEach(async () => { + ({ dir, backends } = await setup()) + }) + + afterEach(async () => { + await cleanup(dir) + }) + + describe('empty repo', () => { + describe('forwards', () => { + it('should migrate pins forward', async () => { + await migration.migrate(backends, () => {}) + }) + }) + + describe('backwards', () => { + it('should migrate pins backward', async () => { + await migration.revert(backends, () => {}) + }) + }) + }) + + it('should migrate blocks forward', async () => { + await bootstrapBlocks(backends, false) + await migration.migrate(backends, () => {}) + await validateBlocks(backends, true) + }) + + it('should migrate blocks backward', async () => { + await bootstrapBlocks(backends, true) + await migration.revert(backends, () => {}) + await validateBlocks(backends, false) + }) + }) +} diff --git a/packages/ipfs-repo-migrations/test/migrations/migration-9-test.js b/packages/ipfs-repo-migrations/test/migrations/migration-9-test.js new file mode 100644 index 00000000..b954feac --- /dev/null +++ b/packages/ipfs-repo-migrations/test/migrations/migration-9-test.js @@ -0,0 +1,236 @@ +/* eslint-env mocha */ +/* eslint-disable max-nested-callbacks */ +'use strict' + +const { expect } = require('aegir/utils/chai') +const cbor = require('cborg') +const migration = require('../../migrations/migration-9') +const { PIN_DS_KEY } = require('../../migrations/migration-9/utils') +const { CID } = require('multiformats/cid') +const { CarReader } = require('@ipld/car') +const loadFixture = require('aegir/utils/fixtures') +const dagPb = require('@ipld/dag-pb') +const mhd = require('multiformats/hashes/digest') +const { base32 } = require('multiformats/bases/base32') + +/** + * @typedef {import('interface-datastore').Datastore} Datastore + * @typedef {import('interface-blockstore').Blockstore} Blockstore + * @typedef {import('interface-datastore').Key} Key + * @typedef {import('multiformats/cid').CIDVersion} CIDVersion + * @typedef {{ car: Uint8Array, root: CID, pins: number }} Pinset + */ + +/** + * @param {Key} key + * @param {{ version?: CIDVersion, codec?: number }} pin + */ +function pinToCid 
(key, pin) { + return CID.create( + pin.version || 0, + pin.codec || dagPb.code, + mhd.decode(base32.decode('b' + key.toString().toLowerCase().split('/').pop())) + ) +} + +// the test data is generated by the file /test/fixtures/generate-car-files.js +// nb. you need to `npm install ipfs@0.48.1` or below in a place it can `require` +// it from before running it +/** + * @type {Record} + */ +const pinsets = { + 'basic pinset': { + car: loadFixture('test/fixtures/pinset-basic.car'), + root: CID.parse('QmeKxgcTtiE1XfvwcVf8wc65GgMmZumEtXK6YJKuvf3VYx'), + pins: 31 + }, + 'multiple bucket pinset': { + car: loadFixture('test/fixtures/pinset-multiple-buckets.car'), + root: CID.parse('QmPGd36dodHj1LQtVWK3LcBVkkVWvfXHEwBHnMpN6tu4BD'), + + // we need at least 8192 pins in order to create a new bucket + pins: 9031 + } +} + +const directPins = [ + 'QmTgj3HVGSuCckhJURLbaBuhPgArb36MEkRhvh5A7WkiFR', + 'QmaLHnphKK4dBk9TuRe5uQMLztQgJ7VbAeaMR8LErHGkcH', + 'QmRnkQuzXiZSQ5DtXfkSsMtL6PyKTK1HBqUxcD8zmgQLQi', + 'QmfDfLw7rrzedHn7XUc7q5UNPyekREE1hFZrwDWfCNBqg8', + 'QmdSzyeG1wALG5vaDP75f8QqcZWRcU4EDtkeY9LnB38eP2', + 'QmR2iwMMYNcCJNcC73ku37QD3oeeV98jUdT2c2xTsaMYvR', + 'QmQMQrVxtNN5JsWVDWtpFyUkjbm8sNbSjy364pGQdfgSx2', + 'QmNgWoYcmsqs6uUhFysC7BjfKTbYXWnL3edpLQJSsWdRqF', + 'QmUjoRPzozKhjJyxkJaP1rgnp6Lvp43fCA247kyCZsGrhN', + 'QmciA8jujqBJmCsnUgp9uXwW4HEzhg7vH4oPKBeiJu5cXC' +] + +const nonDagPbRecursivePins = [ + 'bafyreigv7udtoqiouyeelfijgfix42yc44zsqncbmar6jadq7xfs4mgg4e', + 'bafyreif4nfemzpljifoquq5dqjgmddhiv53b66zbr7ul3goeahyhphxyhq', + 'bafyreif2d33ncuaeeb37jnjylbgrgot3acpy5g33rs5rqvbxxmcnei6tua', + 'bafyreig2zauiy4t5ltjwoaf6tjbdnanah4q6qz5ilol3g2bwfrblpcv2bm', + 'bafyreiglffsxrbgxrnlx7wu2n5rsdtd73ih7zf65pormaarrqr26uhczxa', + 'bafyreiboyxf575xniqegisc2okkerinv4gehmlmjrybcfsc4fbnkhn77te', + 'bafyreif7z4hkico2i4kv3q36ix2qb5g4y62q2filnlmqrkzzbkwt3ewtya', + 'bafyreiczsrn43dxizcmwav2gkjbrvngmqcmdywq7nwyb7a3vn5hssudhr4', + 'bafyreiguc2wwt77l63uan4opi6bo6b4uuizbmfhbx3izb5ca7qp2rtp2xi', + 'bafyreihkjb36nob7cezu3m5psjqo46cturnut4fi6x3fj7md4eiefsinsy' +] + +const nonDagPbDirectPins = [ + 'bafyreibuvrik6o3lyantziriciygeb6jbwocvd7kwtozrjo37n6dki5aom', + 'bafyreicn35rsdstjo2574mtympyup2a6rh7tb5pip3seg6s6j6epe7jduu', + 'bafyreiang6jqksnq7ka3vajo3jvxo624nzt2wskn422sdrjl2cbald4ckq', + 'bafyreie3f7gzq4dvdqitq75bxtkocjpfcny5dta3dzg4gi76q6ql3blfrq', + 'bafyreic54zlg7mq5tojnpj73qc5acjyyzz2kxksmtceavb6q4fryeksp6i', + 'bafyreih3zs3htz6qun62ogeqdlf2iyyw3zkfngelfjgrft3bjeeqegxwiq', + 'bafyreigebeyuxa37qu7p2bxpjn7wlf4itkgwfjiqzetraihvhobs6z4fw4', + 'bafyreigpw4hiw2uggape2nkd7dts3x7lpkpczmmfojtzofmodjkjfcikxq', + 'bafyreifumpjckmmnsiqmpfg4vsxgihjb3pwtygdjqiu6ztabswguko52xm', + 'bafyreiamyrx7wjuxyewnjsu6vfj2u4jzqz2tclukgzwuinic6zbukazgci' +] + +/** + * @param {Blockstore} blockstore + * @param {Datastore} datastore + * @param {Pinset} param2 + */ +async function bootstrapBlocks (blockstore, datastore, { car: carBuf, root: expectedRoot }) { + const car = await CarReader.fromBytes(carBuf) + const [actualRoot] = await car.getRoots() + + expect(actualRoot.toString()).to.equal(expectedRoot.toString()) + await blockstore.open() + + for await (const { cid, bytes } of car.blocks()) { + blockstore.put(CID.parse(cid.toString()), new Uint8Array(bytes.buffer, bytes.byteOffset, bytes.byteLength)) + } + + await blockstore.close() + + await datastore.open() + await datastore.put(PIN_DS_KEY, actualRoot.multihash.bytes) + await datastore.close() +} + +/** + * @param {Datastore} datastore + * @param {Pinset} pinset + */ +async function 
assertPinsetRootIsPresent (datastore, pinset) { + await datastore.open() + const buf = await datastore.get(PIN_DS_KEY) + await datastore.close() + const cid = CID.decode(buf) + expect(cid.toString()).to.equal(pinset.root.toString()) +} + +/** + * @param {import('../types').SetupFunction} setup + * @param {import('../types').CleanupFunction} cleanup + */ +module.exports = (setup, cleanup) => { + describe('migration 9', function () { + this.timeout(1024 * 1000) + /** @type {string} */ + let dir + /** @type {import('../../src/types').Backends} */ + let backends + + beforeEach(async () => { + ({ dir, backends } = await setup()) + }) + + afterEach(async () => { + await cleanup(dir) + }) + + describe('empty repo', () => { + describe('forwards', () => { + it('should migrate pins forward', async () => { + await migration.migrate(backends, () => {}) + }) + }) + + describe('backwards', () => { + it('should migrate pins backward', async () => { + await migration.revert(backends, () => {}) + }) + }) + }) + + Object.keys(pinsets).forEach(title => { + const pinset = pinsets[title] + + /** @type {Map} */ + const pinned = new Map() + + describe(title, () => { + describe('forwards', () => { + beforeEach(async () => { + await bootstrapBlocks(backends.blocks, backends.datastore, pinset) + await assertPinsetRootIsPresent(backends.datastore, pinset) + }) + + it('should migrate pins forward', async () => { + await migration.migrate(backends, () => {}) + + await backends.pins.open() + let migratedDirect = 0 + let migratedNonDagPBRecursive = 0 + + for await (const { key, value } of backends.pins.query({})) { + pinned.set(key, value) + + const pin = cbor.decode(value) + + const cid = pinToCid(key, pin) + + if (directPins.includes(`${cid}`) || nonDagPbDirectPins.includes(`${cid}`)) { + expect(pin.depth).to.equal(0) + migratedDirect++ + } else { + expect(pin.depth).to.equal(Infinity) + } + + if (nonDagPbRecursivePins.includes(`${cid}`)) { + migratedNonDagPBRecursive++ + } + } + + await backends.pins.close() + + expect(migratedDirect).to.equal(directPins.length + nonDagPbDirectPins.length) + expect(migratedNonDagPBRecursive).to.equal(nonDagPbRecursivePins.length) + expect(pinned).to.have.lengthOf(pinset.pins) + + await backends.datastore.open() + await expect(backends.datastore.has(PIN_DS_KEY)).to.eventually.be.false() + await backends.datastore.close() + }) + }) + + describe('backwards', () => { + beforeEach(async () => { + await backends.pins.open() + + for (const [key, value] of pinned.entries()) { + await backends.pins.put(key, value) + } + + await backends.pins.close() + }) + + it('should migrate pins backward', async () => { + await migration.revert(backends, () => {}) + + await assertPinsetRootIsPresent(backends.datastore, pinset) + }) + }) + }) + }) + }) +} diff --git a/packages/ipfs-repo-migrations/test/node.js b/packages/ipfs-repo-migrations/test/node.js new file mode 100644 index 00000000..fd719468 --- /dev/null +++ b/packages/ipfs-repo-migrations/test/node.js @@ -0,0 +1,130 @@ +/* eslint-env mocha */ +'use strict' + +const os = require('os') +const rimraf = require('rimraf') +const { FsDatastore } = require('datastore-fs') +const { LevelDatastore } = require('datastore-level') +const { S3Datastore } = require('datastore-s3') +const { ShardingDatastore } = require('datastore-core/sharding') +const { NextToLast } = require('datastore-core/shard') +const { BlockstoreDatastoreAdapter } = require('blockstore-datastore-adapter') +const mockS3 = require('./fixtures/mock-s3') +const S3 = require('aws-sdk').S3 
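+// the 'with s3' configuration below runs against the sinon-mocked client from
+// ./fixtures/mock-s3, so these node tests never need a real bucket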
diff --git a/packages/ipfs-repo-migrations/test/node.js b/packages/ipfs-repo-migrations/test/node.js
new file mode 100644
index 00000000..fd719468
--- /dev/null
+++ b/packages/ipfs-repo-migrations/test/node.js
@@ -0,0 +1,130 @@
+/* eslint-env mocha */
+'use strict'
+
+const os = require('os')
+const rimraf = require('rimraf')
+const { FsDatastore } = require('datastore-fs')
+const { LevelDatastore } = require('datastore-level')
+const { S3Datastore } = require('datastore-s3')
+const { ShardingDatastore } = require('datastore-core/sharding')
+const { NextToLast } = require('datastore-core/shard')
+const { BlockstoreDatastoreAdapter } = require('blockstore-datastore-adapter')
+const mockS3 = require('./fixtures/mock-s3')
+const S3 = require('aws-sdk').S3
+const { createRepo } = require('./fixtures/repo')
+
+/**
+ * @param {string} dir
+ */
+async function cleanup (dir) {
+  // rimraf.sync is synchronous, so there is nothing to await here
+  rimraf.sync(dir)
+}
+
+const CONFIGURATIONS = [{
+  name: 'with sharding',
+  cleanup,
+  /**
+   * @param {string} prefix
+   * @returns {import('../src/types').Backends}
+   */
+  createBackends: (prefix) => {
+    return {
+      root: new FsDatastore(prefix),
+      blocks: new BlockstoreDatastoreAdapter(
+        new ShardingDatastore(
+          new FsDatastore(`${prefix}/blocks`, {
+            extension: '.data'
+          }),
+          new NextToLast(2))
+      ),
+      datastore: new LevelDatastore(`${prefix}/datastore`),
+      keys: new FsDatastore(`${prefix}/keys`),
+      pins: new LevelDatastore(`${prefix}/pins`)
+    }
+  }
+}, {
+  name: 'without sharding',
+  cleanup,
+  /**
+   * @param {string} prefix
+   * @returns {import('../src/types').Backends}
+   */
+  createBackends: (prefix) => {
+    return {
+      root: new FsDatastore(prefix),
+      blocks: new BlockstoreDatastoreAdapter(
+        new FsDatastore(`${prefix}/blocks`, {
+          extension: '.data'
+        })
+      ),
+      datastore: new LevelDatastore(`${prefix}/datastore`),
+      keys: new FsDatastore(`${prefix}/keys`),
+      pins: new LevelDatastore(`${prefix}/pins`)
+    }
+  }
+}, {
+  name: 'with s3',
+  cleanup: async () => {},
+  /**
+   * @param {string} prefix
+   * @returns {import('../src/types').Backends}
+   */
+  createBackends: (prefix) => {
+    const s3Instance = new S3({
+      params: {
+        Bucket: 'test'
+      }
+    })
+    mockS3(s3Instance)
+
+    return {
+      root: new S3Datastore(prefix, {
+        s3: s3Instance,
+        createIfMissing: false
+      }),
+      blocks: new BlockstoreDatastoreAdapter(
+        new ShardingDatastore(
+          new S3Datastore(`${prefix}/blocks`, {
+            s3: s3Instance,
+            createIfMissing: false
+          }),
+          new NextToLast(2)
+        )
+      ),
+      datastore: new ShardingDatastore(new S3Datastore(`${prefix}/datastore`, {
+        s3: s3Instance,
+        createIfMissing: false
+      }), new NextToLast(2)),
+      keys: new ShardingDatastore(new S3Datastore(`${prefix}/keys`, {
+        s3: s3Instance,
+        createIfMissing: false
+      }), new NextToLast(2)),
+      pins: new ShardingDatastore(new S3Datastore(`${prefix}/pins`, {
+        s3: s3Instance,
+        createIfMissing: false
+      }), new NextToLast(2))
+    }
+  }
+}]
+
+CONFIGURATIONS.forEach(({ name, createBackends, cleanup }) => {
+  const setup = () => createRepo(createBackends, os.tmpdir())
+
+  describe(name, () => {
+    describe('version tests', () => {
+      require('./version-test')(setup, cleanup)
+    })
+
+    describe('migrations tests', () => {
+      require('./migrations')(setup, cleanup)
+    })
+
+    describe('init tests', () => {
+      require('./init-test')(setup, cleanup)
+    })
+
+    describe('integration tests', () => {
+      require('./integration-test')(setup, cleanup)
+    })
+  })
+})
diff --git a/packages/ipfs-repo-migrations/test/test-migrations/index.js b/packages/ipfs-repo-migrations/test/test-migrations/index.js
new file mode 100644
index 00000000..6676d164
--- /dev/null
+++ b/packages/ipfs-repo-migrations/test/test-migrations/index.js
@@ -0,0 +1,5 @@
+'use strict'
+
+module.exports = [
+  require('./migration-2')
+]
diff --git a/packages/ipfs-repo-migrations/test/test-migrations/migration-2/index.js b/packages/ipfs-repo-migrations/test/test-migrations/migration-2/index.js
new file mode 100644
index 00000000..e33c48ea
--- /dev/null
+++ b/packages/ipfs-repo-migrations/test/test-migrations/migration-2/index.js
@@ -0,0 +1,126 @@
+'use strict'
+
+const Key = require('interface-datastore').Key
+const _set = require('just-safe-set')
+const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
+
+/**
+ * @typedef {import('../../../src/types').Backends} Backends
+ * @typedef {import('../../../src/types').MigrationProgressCallback} MigrationProgressCallback
+ */
+
+const CONFIG_KEY = new Key('config')
+const NEW_API_ADDRESS = '/ip6/::/tcp/5001'
+
+/**
+ * EXAMPLE MIGRATION
+ * =================
+ *
+ * Shows how to update config values. The migration:
+ * 1) changes 'Addresses.API' to an array containing a new IPv6 localhost address
+ * 2) changes 'Gateway.HTTPHeaders.Access-Control-Allow-Origin' to a specific origin
+ */

+/**
+ * @param {*} config
+ */
+function addNewApiAddress (config) {
+  let apiAddrs = config.Addresses.API
+
+  if (!Array.isArray(apiAddrs)) {
+    apiAddrs = [apiAddrs]
+  }
+
+  if (apiAddrs.includes(NEW_API_ADDRESS)) {
+    // the address is already present, return the config unchanged
+    return config
+  }
+
+  apiAddrs.push(NEW_API_ADDRESS)
+  config.Addresses.API = apiAddrs
+
+  return config
+}
+
+/**
+ * @param {*} config
+ */
+function removeNewApiAddress (config) {
+  const apiAddrs = config.Addresses.API
+
+  if (!Array.isArray(apiAddrs)) {
+    return config
+  }
+
+  if (apiAddrs.length > 2) {
+    throw new Error('Not possible to revert as Addresses.API has more than 2 addresses, not sure what to do.')
+  }
+
+  if (!apiAddrs.includes(NEW_API_ADDRESS)) {
+    throw new Error('Not possible to revert as Addresses.API has an unknown address, not sure what to do.')
+  }
+
+  _set(config, 'Addresses.API', apiAddrs[0] === NEW_API_ADDRESS ? apiAddrs[1] : apiAddrs[0])
+
+  return config
+}
+
+/**
+ * @param {Backends} backends
+ * @param {MigrationProgressCallback} onProgress
+ */
+async function migrate (backends, onProgress) {
+  const store = backends.root
+  await store.open()
+
+  try {
+    const rawConfig = await store.get(CONFIG_KEY)
+    let config = JSON.parse(rawConfig.toString())
+
+    // Convert Address.API to Array with new IPv6 localhost
+    config = addNewApiAddress(config)
+
+    // Modify allowed origin
+    _set(config, 'Gateway.HTTPHeaders.Access-Control-Allow-Origin', 'some.origin.com')
+
+    const buf = uint8ArrayFromString(JSON.stringify(config, null, 2))
+    await store.put(CONFIG_KEY, buf)
+  } finally {
+    await store.close()
+  }
+
+  onProgress(100, 'done!')
+}
+
+/**
+ * @param {Backends} backends
+ * @param {MigrationProgressCallback} onProgress
+ */
+async function revert (backends, onProgress) {
+  const store = backends.root
+  await store.open()
+
+  try {
+    const rawConfig = await store.get(CONFIG_KEY)
+    let config = JSON.parse(rawConfig.toString())
+
+    // If possible revert to previous value
+    config = removeNewApiAddress(config)
+
+    // Reset origin
+    _set(config, 'Gateway.HTTPHeaders.Access-Control-Allow-Origin', '*')
+
+    const buf = uint8ArrayFromString(JSON.stringify(config, null, 2))
+    await store.put(CONFIG_KEY, buf)
+  } finally {
+    await store.close()
+  }
+
+  onProgress(100, 'done!')
+}
+
+module.exports = {
+  version: 2,
+  description: 'Updates config',
+  migrate,
+  revert,
+  newApiAddr: NEW_API_ADDRESS
+}
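(Aside: this example migration can be driven by hand against an in-memory root datastore. A minimal sketch — the `require` path and the reduced `{ root }` backends object are illustrative only; `MemoryDatastore` and the `uint8arrays` helpers are the same modules used elsewhere in these tests:

```js
'use strict'

const { Key } = require('interface-datastore')
const { MemoryDatastore } = require('datastore-core/memory')
const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
const { toString: uint8ArrayToString } = require('uint8arrays/to-string')
// hypothetical path to the example migration above
const migration = require('./test-migrations/migration-2')

async function main () {
  const root = new MemoryDatastore()
  await root.open()
  await root.put(new Key('config'), uint8ArrayFromString(JSON.stringify({
    Addresses: { API: '/ip4/127.0.0.1/tcp/5001' },
    Gateway: { HTTPHeaders: {} }
  })))
  await root.close()

  // only the root backend is touched by this migration, so the object is deliberately partial
  await migration.migrate({ root }, (percent, message) => console.log(percent, message))

  await root.open()
  const config = JSON.parse(uint8ArrayToString(await root.get(new Key('config'))))
  await root.close()

  // Addresses.API is now an array that includes the new IPv6 address
  console.log(config.Addresses.API)
}

main().catch(console.error)
```
)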
diff --git a/packages/ipfs-repo-migrations/test/types.d.ts b/packages/ipfs-repo-migrations/test/types.d.ts
new file mode 100644
index 00000000..16b60ae6
--- /dev/null
+++ b/packages/ipfs-repo-migrations/test/types.d.ts
@@ -0,0 +1,4 @@
+import { Backends } from '../src/types'
+
+export interface SetupFunction { (prefix?: string): Promise<{ dir: string, backends: Backends }> }
+export interface CleanupFunction { (dir: string): Promise<void> }
diff --git a/packages/ipfs-repo-migrations/test/version-test.js b/packages/ipfs-repo-migrations/test/version-test.js
new file mode 100644
index 00000000..57364c40
--- /dev/null
+++ b/packages/ipfs-repo-migrations/test/version-test.js
@@ -0,0 +1,60 @@
+/* eslint-env mocha */
+'use strict'
+
+const { expect } = require('aegir/utils/chai')
+const { VERSION_KEY, CONFIG_KEY } = require('../src/utils')
+const version = require('../src/repo/version')
+const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string')
+const errors = require('../src/errors')
+
+// When a new versioning mechanism is introduced in a new version, don't forget to
+// update the range (from/to) in the previous version test's description
+
+/**
+ * @param {import('./types').SetupFunction} setup
+ * @param {import('./types').CleanupFunction} cleanup
+ */
+module.exports = (setup, cleanup) => {
+  /** @type {string} */
+  let dir
+  /** @type {import('../src/types').Backends} */
+  let backends
+
+  beforeEach(async () => {
+    ({ dir, backends } = await setup())
+  })
+
+  afterEach(() => cleanup(dir))
+
+  it('getVersion should fail without any version in repo', async () => {
+    await expect(version.getVersion(backends)).to.be.eventually.rejectedWith(errors.NotInitializedRepoError)
+      .with.property('code', errors.NotInitializedRepoError.code)
+  })
+
+  describe('version 7 and below', () => {
+    it('should get version number', async () => {
+      // Create version file
+      const store = backends.root
+      await store.open()
+      await store.put(CONFIG_KEY, uint8ArrayFromString('some dummy config'))
+      await store.put(VERSION_KEY, uint8ArrayFromString('7'))
+      await store.close()
+
+      expect(await version.getVersion(backends)).to.be.equal(7)
+    })
+
+    it('should set version number', async () => {
+      await expect(version.getVersion(backends)).to.be.eventually.rejectedWith(errors.NotInitializedRepoError).with.property('code', errors.NotInitializedRepoError.code)
+
+      // Create version file
+      const store = backends.root
+      await store.open()
+      await store.put(CONFIG_KEY, uint8ArrayFromString('some dummy config'))
+      await store.put(VERSION_KEY, uint8ArrayFromString('5'))
+      await store.close()
+
+      await version.setVersion(7, backends)
+      expect(await version.getVersion(backends)).to.be.equal(7)
+    })
+  })
+}
diff --git a/packages/ipfs-repo-migrations/tsconfig.json b/packages/ipfs-repo-migrations/tsconfig.json
new file mode 100644
index 00000000..e17e1c7d
--- /dev/null
+++ b/packages/ipfs-repo-migrations/tsconfig.json
@@ -0,0 +1,15 @@
+{
+  "extends": "aegir/src/config/tsconfig.aegir.json",
+  "compilerOptions": {
+    "outDir": "dist"
+  },
+  "include": [
+    "src",
+    "test",
+    "migrations"
+  ],
+  "exclude": [
+    "migrations/migration-9/pin.js",
+    "migrations/migration-9/pin.d.ts"
+  ]
+}
diff --git a/.aegir.js b/packages/ipfs-repo/.aegir.js
similarity index 100%
rename from .aegir.js
rename to packages/ipfs-repo/.aegir.js
diff --git a/.gitattributes b/packages/ipfs-repo/.gitattributes
similarity index 100%
rename from .gitattributes
rename to packages/ipfs-repo/.gitattributes
diff --git a/CHANGELOG.md b/packages/ipfs-repo/CHANGELOG.md
similarity index 100%
rename from CHANGELOG.md
rename to packages/ipfs-repo/CHANGELOG.md
diff --git a/packages/ipfs-repo/LICENSE-APACHE b/packages/ipfs-repo/LICENSE-APACHE
new file mode 100644
index 00000000..14478a3b
--- /dev/null
+++ b/packages/ipfs-repo/LICENSE-APACHE
@@ -0,0 +1,5 @@
+Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License. diff --git a/packages/ipfs-repo/LICENSE-MIT b/packages/ipfs-repo/LICENSE-MIT new file mode 100644 index 00000000..749aa1ec --- /dev/null +++ b/packages/ipfs-repo/LICENSE-MIT @@ -0,0 +1,19 @@ +The MIT License (MIT) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/packages/ipfs-repo/README.md b/packages/ipfs-repo/README.md new file mode 100644 index 00000000..b3f1361e --- /dev/null +++ b/packages/ipfs-repo/README.md @@ -0,0 +1,429 @@ +# IPFS Repo JavaScript Implementation + +[![](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://ipn.io) +[![](https://img.shields.io/badge/project-IPFS-blue.svg?style=flat-square)](http://ipfs.io/) +[![](https://img.shields.io/badge/freenode-%23ipfs-blue.svg?style=flat-square)](http://webchat.freenode.net/?channels=%23ipfs) +[![standard-readme compliant](https://img.shields.io/badge/standard--readme-OK-green.svg?style=flat-square)](https://github.com/RichardLitt/standard-readme) +[![Travis CI](https://flat.badgen.net/travis/ipfs/js-ipfs-repo)](https://travis-ci.com/ipfs/js-ipfs-repo) +[![codecov](https://codecov.io/gh/ipfs/js-ipfs-repo/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-ipfs-repo) [![Dependency Status](https://david-dm.org/ipfs/js-ipfs-repo.svg?style=flat-square)](https://david-dm.org/ipfs/js-ipfs-repo) +[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard) + +> Implementation of the IPFS repo spec (https://github.com/ipfs/specs/blob/master/REPO.md) in JavaScript + +This is the implementation of the [IPFS repo spec](https://github.com/ipfs/specs/blob/master/REPO.md) in JavaScript. 
+
+## Lead Maintainer
+
+[Alex Potsides](https://github.com/achingbrain)
+
+## Table of Contents
+
+- [Background](#background)
+- [Install](#install)
+  - [npm](#npm)
+  - [Use in Node.js](#use-in-nodejs)
+  - [Use in a browser with browserify, webpack or any other bundler](#use-in-a-browser-with-browserify-webpack-or-any-other-bundler)
+  - [Use in a browser Using a script tag](#use-in-a-browser-using-a-script-tag)
+- [Usage](#usage)
+- [API](#api)
+  - [Setup](#setup)
+    - [`new Repo(path[, options])`](#new-repopath-options)
+    - [`Promise repo.init()`](#promise-repoinit)
+    - [`Promise repo.open()`](#promise-repoopen)
+    - [`Promise repo.close()`](#promise-repoclose)
+    - [`Promise repo.exists()`](#promiseboolean-repoexists)
+    - [`Promise repo.isInitialized()`](#promiseboolean-repoisinitialized)
+  - [Repos](#repos)
+    - [`Promise repo.put(key, value:Uint8Array)`](#promise-repoputkey-valueuint8array)
+    - [`Promise repo.get(key)`](#promiseuint8array-repogetkey)
+  - [Blocks](#blocks)
+    - [`Promise repo.blocks.put(block:Block)`](#promiseblock-repoblocksputblockblock)
+    - [`AsyncIterator repo.blocks.putMany(source:AsyncIterable)`](#asynciteratorblock-repoblocksputmanysourceasynciterableblock)
+    - [`Promise repo.blocks.get(cid:CID)`](#promiseblock-repoblocksgetcidcid)
+    - [`AsyncIterable repo.blocks.getMany(source:AsyncIterable)`](#asynciterableblock-repoblocksgetmanysourceasynciterablecid)
+    - [`Promise repo.blocks.has (cid:CID)`](#promiseboolean-repoblockshas-cidcid)
+    - [`Promise repo.blocks.delete (cid:CID)`](#promiseboolean-repoblocksdelete-cidcid)
+    - [`AsyncIterator repo.blocks.query (query)`](#asynciteratorblockcid-repoblocksquery-query)
+    - [`Promise repo.blocks.delete(cid:CID)`](#promisecid-repoblocksdeletecidcid)
+    - [`AsyncIterator repo.blocks.deleteMany(source:AsyncIterable)`](#asynciteratorcid-repoblocksdeletemanysourceasynciterablecid)
+  - [Datastore](#datastore)
+    - [`repo.datastore`](#repodatastore)
+  - [Config](#config)
+    - [`Promise repo.config.set(key:String, value:Object)`](#promise-repoconfigsetkeystring-valueobject)
+    - [`Promise repo.config.replace(value:Object)`](#promise-repoconfigreplacevalueobject)
+    - [`Promise repo.config.get(key:String)`](#promise-repoconfiggetkeystring)
+    - [`Promise repo.config.getAll()`](#promiseobject-repoconfiggetall)
+    - [`Promise repo.config.exists()`](#promiseboolean-repoconfigexists)
+  - [Version](#version)
+    - [`Promise repo.version.get()`](#promisenumber-repoversionget)
+    - [`Promise repo.version.set (version:Number)`](#promise-repoversionset-versionnumber)
+  - [API Addr](#api-addr)
+    - [`Promise repo.apiAddr.get()`](#promisestring-repoapiaddrget)
+    - [`Promise repo.apiAddr.set(value)`](#promise-repoapiaddrsetvalue)
+  - [Status](#status)
+    - [`Promise repo.stat()`](#promiseobject-repostat)
+  - [Lock](#lock)
+    - [`Promise lock.lock(dir)`](#promise-locklockdir)
+    - [`Promise closer.close()`](#promise-closerclose)
+    - [`Promise lock.locked(dir)`](#promiseboolean-locklockeddir)
+- [Notes](#notes)
+  - [Migrations](#migrations)
+- [Contribute](#contribute)
+- [License](#license)
+
+## Background
+
+Here is the architectural reasoning for this repo:
+
+```bash
+┌────────────────────────────────────────┐
+│                IPFSRepo                │
+└────────────────────────────────────────┘
+          ┌─────────────────┐
+          │        /        │
+          ├─────────────────┤
+          │    Datastore    │
+          └─────────────────┘
+       ┌───────────┴───────────┐
+┌─────────────────┐ ┌─────────────────┐
+│     /blocks     │ │   /datastore    │
+├─────────────────┤ ├─────────────────┤
+│    Datastore    │ │ LevelDatastore  │
+└─────────────────┘ └─────────────────┘
+
+┌────────────────────────────────────────┐   ┌────────────────────────────────────────┐
+│       IPFSRepo - Default Node.js       │   │       IPFSRepo - Default Browser       │
+└────────────────────────────────────────┘   └────────────────────────────────────────┘
+          ┌─────────────────┐                           ┌─────────────────┐
+          │        /        │                           │        /        │
+          ├─────────────────┤                           ├─────────────────┤
+          │   FsDatastore   │                           │  IdbDatastore   │
+          └─────────────────┘                           └─────────────────┘
+       ┌───────────┴───────────┐                     ┌───────────┴───────────┐
+┌─────────────────┐ ┌─────────────────┐   ┌─────────────────┐ ┌─────────────────┐
+│     /blocks     │ │   /datastore    │   │     /blocks     │ │   /datastore    │
+├─────────────────┤ ├─────────────────┤   ├─────────────────┤ ├─────────────────┤
+│ FlatfsDatastore │ │LevelDBDatastore │   │  IdbDatastore   │ │  IdbDatastore   │
+└─────────────────┘ └─────────────────┘   └─────────────────┘ └─────────────────┘
+```
+
+This provides a well-defined interface for creating and interacting with an IPFS repo.
+
+## Install
+
+### npm
+
+```sh
+> npm install ipfs-repo
+```
+
+### Use in Node.js
+
+```js
+var IPFSRepo = require('ipfs-repo')
+```
+
+### Use in a browser with browserify, webpack or any other bundler
+
+```js
+var IPFSRepo = require('ipfs-repo')
+```
+
+### Use in a browser Using a script tag
+
+Loading this module through a script tag will make the `IpfsRepo` object available in the global namespace.
+
+```html
+<script src="https://unpkg.com/ipfs-repo/dist/index.min.js"></script>
+```
+
+## Usage
+
+Example:
+
+```js
+const Repo = require('ipfs-repo')
+const repo = new Repo('/tmp/ipfs-repo')
+
+await repo.init({ cool: 'config' })
+await repo.open()
+console.log('repo is ready')
+```
+
+This will have created the following structure, either on disk or as an in-memory representation:
+
+```
+├── blocks
+│   ├── SHARDING
+│   └── _README
+├── config
+├── datastore
+├── keys
+└── version
+```
+
+## API
+
+### Setup
+
+#### `new Repo(path[, options])`
+
+Creates an IPFS Repo.
+
+Arguments:
+
+* `path` (string, mandatory): the path for this repo
+* `options` (object, optional): may contain the following values
+  * `autoMigrate` (bool, defaults to `true`): controls automatic migrations of repository.
+  * `onMigrationProgress` (function(version, percentComplete, message)): callback function to be notified of migration progress
+  * `lock` ([Lock](#lock) or string *Deprecated*): what type of lock to use. The lock is acquired when opening the repo; as a string it can be `"fs"` or `"memory"`.
+  * `storageBackends` (object, optional): may contain the following values, which should each be a class implementing the [datastore interface](https://github.com/ipfs/interface-datastore#readme):
+    * `root` (defaults to [`datastore-fs`](https://github.com/ipfs/js-datastore-fs#readme) in Node.js and [`datastore-level`](https://github.com/ipfs/js-datastore-level#readme) in the browser). Defines the back-end type used for gets and puts of values at the root (`repo.put()`, `repo.get()`)
+    * `blocks` (defaults to [`datastore-fs`](https://github.com/ipfs/js-datastore-fs#readme) in Node.js and [`datastore-level`](https://github.com/ipfs/js-datastore-level#readme) in the browser). Defines the back-end type used for gets and puts of values at `repo.blocks`.
+    * `keys` (defaults to [`datastore-fs`](https://github.com/ipfs/js-datastore-fs#readme) in Node.js and [`datastore-level`](https://github.com/ipfs/js-datastore-level#readme) in the browser). Defines the back-end type used for gets and puts of encrypted keys at `repo.keys`
+    * `datastore` (defaults to [`datastore-level`](https://github.com/ipfs/js-datastore-level#readme)). Defines the back-end type used as the key-value store used for gets and puts of values at `repo.datastore`.
+
+```js
+const repo = new Repo('path/to/repo')
+```
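+
+Since the back-ends are passed as classes rather than instances, swapping them out is mechanical. A sketch of a fully in-memory repo, e.g. for tests — `MemoryDatastore` here is the `interface-datastore` export, and which keys you override is up to you:
+
+```js
+const Repo = require('ipfs-repo')
+const { MemoryDatastore } = require('interface-datastore')
+
+// nothing touches the disk, so no cleanup is needed afterwards
+const repo = new Repo('/tmp/throwaway-repo', {
+  lock: 'memory',
+  storageBackends: {
+    root: MemoryDatastore,
+    blocks: MemoryDatastore,
+    keys: MemoryDatastore,
+    datastore: MemoryDatastore
+  }
+})
+
+await repo.init({})
+await repo.open()
+```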
+#### `Promise repo.init()`
+
+Creates the necessary folder structure inside the repo
+
+#### `Promise repo.open()`
+
+[Locks](https://en.wikipedia.org/wiki/Record_locking) the repo to prevent conflicts arising from simultaneous access
+
+#### `Promise repo.close()`
+
+Unlocks the repo.
+
+#### `Promise repo.exists()`
+
+Tells whether this repo exists or not. Returned promise resolves to a `boolean`
+
+#### `Promise repo.isInitialized()`
+
+The returned promise resolves to `false` if the repo has not been initialized and `true` if it has
+
+### Repos
+
+Root repo:
+
+#### `Promise repo.put(key, value:Uint8Array)`
+
+Put a value at the root of the repo
+
+* `key` can be a Uint8Array, a string or a [Key][]
+
+#### `Promise repo.get(key)`
+
+Get a value at the root of the repo
+
+* `key` can be a Uint8Array, a string or a [Key][]
+
+### Blocks
+
+#### `Promise repo.blocks.put(block:Block)`
+
+* `block` should be of type [Block][]
+
+#### `AsyncIterator repo.blocks.putMany(source:AsyncIterable)`
+
+Put many blocks.
+
+* `source` should be an AsyncIterable that yields entries of type [Block][]
+
+#### `Promise repo.blocks.get(cid:CID)`
+
+Get block.
+
+* `cid` is the content id of type [CID][]
+
+#### `AsyncIterable repo.blocks.getMany(source:AsyncIterable)`
+
+Get many blocks
+
+* `source` should be an AsyncIterable that yields entries of type [CID][]
+
+#### `Promise repo.blocks.has (cid:CID)`
+
+Indicate if a block is present for the passed CID
+
+* `cid` should be of the type [CID][]
+
+#### `Promise repo.blocks.delete (cid:CID)`
+
+Deletes a block
+
+* `cid` should be of the type [CID][]
+
+#### `AsyncIterator repo.blocks.query (query)`
+
+Query what blocks are available in blockstore.
+
+If `query.keysOnly` is true, the returned iterator will yield [CID][]s, otherwise it will yield [Block][]s
+
+* `query` is an object as specified in [interface-datastore](https://github.com/ipfs/interface-datastore#query).
+
+#### `Promise repo.blocks.delete(cid:CID)`
+
+Delete a block
+
+* `cid` should be of the type [CID][]
+
+#### `AsyncIterator repo.blocks.deleteMany(source:AsyncIterable)`
+
+Delete many blocks
+
+* `source` should be an Iterable or AsyncIterable that yields entries of the type [CID][]
+
+### Datastore
+
+#### `repo.datastore`
+
+This contains a full implementation of [the `interface-datastore` API](https://github.com/ipfs/interface-datastore#api).
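+
+For example, a small sketch of using it directly — `Key` is the `interface-datastore` export and the key name is arbitrary:
+
+```js
+const { Key } = require('interface-datastore')
+
+// any interface-datastore method is available here
+await repo.datastore.put(new Key('/greeting'), new TextEncoder().encode('hello'))
+const value = await repo.datastore.get(new Key('/greeting'))
+console.log(new TextDecoder().decode(value)) // 'hello'
+```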
+### Config
+
+Instead of using `repo.set('config')`, this exposes an API that allows you to set and get a decoded config object, and to change any of the config values individually in a safe manner.
+
+#### `Promise repo.config.set(key:String, value:Object)`
+
+Set a config value. `value` can be any object that is serializable to JSON.
+
+* `key` is a string specifying the object path. Example:
+
+```js
+await repo.config.set('a.b.c', 'c value')
+const config = await repo.config.get()
+assert.equal(config.a.b.c, 'c value')
+```
+
+#### `Promise repo.config.replace(value:Object)`
+
+Set the whole config value. `value` can be any object that is serializable to JSON.
+
+#### `Promise repo.config.get(key:String)`
+
+Get a config value. Returned promise resolves to the same type that was set before.
+
+* `key` is a string specifying the object path. Example:
+
+```js
+const value = await repo.config.get('a.b.c')
+console.log('config.a.b.c = ', value)
+```
+
+#### `Promise repo.config.getAll()`
+
+Get the entire config value.
+
+#### `Promise repo.config.exists()`
+
+Whether the config sub-repo exists.
+
+### Version
+
+#### `Promise repo.version.get()`
+
+Gets the repo version (an integer).
+
+#### `Promise repo.version.set (version:Number)`
+
+Sets the repo version.
+
+### API Addr
+
+#### `Promise repo.apiAddr.get()`
+
+Gets the API address.
+
+#### `Promise repo.apiAddr.set(value)`
+
+Sets the API address.
+
+* `value` should be a [Multiaddr][] or a String representing a valid one.
+
+### Status
+
+#### `Promise repo.stat()`
+
+Gets the repo status.
+
+Returned promise resolves to an `Object` with the following keys:
+
+- `numObjects`
+- `repoPath`
+- `repoSize`
+- `version`
+- `storageMax`
+
+### Lock
+
+IPFS Repo comes with two built-in locks: memory and fs. These can be imported via the following:
+
+```js
+const fsLock = require('ipfs-repo/src/lock') // Default in Node.js
+const memoryLock = require('ipfs-repo/src/lock-memory') // Default in browser
+```
+
+You can also provide your own custom Lock. It must be an object with the following interface:
+
+#### `Promise lock.lock(dir)`
+
+Sets the lock if one does not already exist. If a lock already exists, it should throw an error.
+
+`dir` is a string to the directory the lock should be created at. The repo typically creates the lock at its root.
+
+Returns `closer`, where `closer` has a `close` method for removing the lock.
+
+#### `Promise closer.close()`
+
+Closes the lock created by `lock.lock`
+
+If no error was thrown, the lock was successfully removed.
+
+#### `Promise lock.locked(dir)`
+
+Checks the existence of the lock.
+
+`dir` is the path to the directory to check for the lock. The repo typically checks for the lock at its root.
+
+Returned promise resolves to a `boolean` indicating the existence of the lock.
+
+## Notes
+
+- [Explanation of how repo is structured](https://github.com/ipfs/js-ipfs-repo/pull/111#issuecomment-279948247)
+
+### Migrations
+
+When there is a new repo migration and the version of the repo is increased, don't
+forget to propagate the changes into the test repo (`test/test-repo`).
+
+**For tools that run mainly in the browser environment, be aware that disabling automatic
+migrations leaves the user with no way to run the migrations because there is no CLI in the browser. In such
+a case, you should provide a way to trigger migrations manually.**
+
+## Contribute
+
+There are some ways you can make this module better:
+
+- Consult our [open issues](https://github.com/ipfs/js-ipfs-repo/issues) and take on one of them
+- Help our tests reach 100% coverage!
+
+This repository falls under the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md).
+ +[![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/CONTRIBUTING.md) + +## License + +[Apache-2.0](LICENSE-APACHE) OR [MIT](LICENSE-MIT) + +[CID]: https://github.com/multiformats/js-cid +[Key]: https://github.com/ipfs/interface-datastore#keys +[Block]: https://github.com/ipld/js-ipld-block +[Multiaddr]: https://github.com/multiformats/js-multiaddr diff --git a/graphs/arch.monopic b/packages/ipfs-repo/graphs/arch.monopic similarity index 100% rename from graphs/arch.monopic rename to packages/ipfs-repo/graphs/arch.monopic diff --git a/graphs/arch.txt b/packages/ipfs-repo/graphs/arch.txt similarity index 100% rename from graphs/arch.txt rename to packages/ipfs-repo/graphs/arch.txt diff --git a/packages/ipfs-repo/package.json b/packages/ipfs-repo/package.json new file mode 100644 index 00000000..25a1b82f --- /dev/null +++ b/packages/ipfs-repo/package.json @@ -0,0 +1,93 @@ +{ + "name": "ipfs-repo", + "version": "12.0.0", + "description": "IPFS Repo implementation", + "leadMaintainer": "Alex Potsides ", + "main": "src/index.js", + "types": "dist/src/index.d.ts", + "files": [ + "src", + "dist" + ], + "browser": { + "rimraf": false, + "datastore-fs": "datastore-level", + "./src/locks/fs.js": "./src/locks/memory.js", + "./src/default-options.js": "./src/default-options.browser.js" + }, + "scripts": { + "clean": "rimraf types dist", + "test": "aegir test", + "build": "aegir build", + "lint": "aegir ts -p check && aegir lint", + "release": "aegir release", + "release-minor": "aegir release --type minor", + "release-major": "aegir release --type major", + "depcheck": "aegir dep-check" + }, + "repository": { + "type": "git", + "url": "https://github.com/ipfs/js-ipfs-repo.git" + }, + "keywords": [ + "IPFS", + "libp2p", + "datastore" + ], + "license": "(Apache-2.0 OR MIT)", + "homepage": "https://github.com/ipfs/js-ipfs-repo/tree/master/packages/ipfs-repo", + "devDependencies": { + "@ipld/dag-cbor": "^6.0.4", + "@types/bytes": "^3.1.0", + "@types/debug": "^4.1.5", + "@types/proper-lockfile": "^4.1.1", + "@types/rimraf": "^3.0.0", + "aegir": "^35.0.2", + "assert": "^2.0.0", + "blockstore-core": "^1.0.2", + "blockstore-datastore-adapter": "^2.0.1", + "events": "^3.3.0", + "ipfs-utils": "^8.1.3", + "it-all": "^1.0.2", + "it-drain": "^1.0.1", + "it-first": "^1.0.2", + "just-range": "^2.1.0", + "rimraf": "^3.0.0", + "sinon": "^11.1.1", + "url": "^0.11.0", + "util": "^0.12.3" + }, + "dependencies": { + "@ipld/dag-pb": "^2.1.0", + "bytes": "^3.1.0", + "cborg": "^1.3.4", + "datastore-core": "^6.0.7", + "debug": "^4.1.0", + "err-code": "^3.0.1", + "eslint-plugin-ava": "^12.0.0", + "interface-blockstore": "^2.0.2", + "interface-datastore": "^6.0.2", + "ipfs-repo-migrations": "^10.0.0", + "it-filter": "^1.0.2", + "it-map": "^1.0.5", + "it-merge": "^1.0.2", + "it-parallel-batch": "^1.0.9", + "it-pipe": "^1.1.0", + "it-pushable": "^1.4.0", + "just-safe-get": "^2.0.0", + "just-safe-set": "^2.1.0", + "merge-options": "^3.0.4", + "mortice": "^2.0.1", + "multiformats": "^9.0.4", + "p-queue": "^6.0.0", + "proper-lockfile": "^4.0.0", + "sort-keys": "^4.0.0", + "uint8arrays": "^3.0.0" + }, + "eslintConfig": { + "extends": "ipfs", + "ignorePatterns": [ + "!.aegir.js" + ] + } +} diff --git a/src/api-addr.js b/packages/ipfs-repo/src/api-addr.js similarity index 100% rename from src/api-addr.js rename to packages/ipfs-repo/src/api-addr.js 
diff --git a/src/config.js b/packages/ipfs-repo/src/config.js similarity index 100% rename from src/config.js rename to packages/ipfs-repo/src/config.js diff --git a/src/constants.js b/packages/ipfs-repo/src/constants.js similarity index 100% rename from src/constants.js rename to packages/ipfs-repo/src/constants.js diff --git a/src/default-datastore.js b/packages/ipfs-repo/src/default-datastore.js similarity index 100% rename from src/default-datastore.js rename to packages/ipfs-repo/src/default-datastore.js diff --git a/src/default-options.browser.js b/packages/ipfs-repo/src/default-options.browser.js similarity index 100% rename from src/default-options.browser.js rename to packages/ipfs-repo/src/default-options.browser.js diff --git a/src/default-options.js b/packages/ipfs-repo/src/default-options.js similarity index 100% rename from src/default-options.js rename to packages/ipfs-repo/src/default-options.js diff --git a/src/errors/index.js b/packages/ipfs-repo/src/errors/index.js similarity index 100% rename from src/errors/index.js rename to packages/ipfs-repo/src/errors/index.js diff --git a/src/gc.js b/packages/ipfs-repo/src/gc.js similarity index 98% rename from src/gc.js rename to packages/ipfs-repo/src/gc.js index a9fad168..d39aa526 100644 --- a/src/gc.js +++ b/packages/ipfs-repo/src/gc.js @@ -2,7 +2,7 @@ const { CID } = require('multiformats/cid') const log = require('debug')('ipfs:repo:gc') -const { Errors } = require('interface-datastore') +const Errors = require('datastore-core/errors') const ERR_NOT_FOUND = Errors.notFoundError().code const parallelBatch = require('it-parallel-batch') const { pipe } = require('it-pipe') diff --git a/src/idstore.js b/packages/ipfs-repo/src/idstore.js similarity index 100% rename from src/idstore.js rename to packages/ipfs-repo/src/idstore.js diff --git a/src/index.js b/packages/ipfs-repo/src/index.js similarity index 100% rename from src/index.js rename to packages/ipfs-repo/src/index.js diff --git a/src/locks/fs.js b/packages/ipfs-repo/src/locks/fs.js similarity index 100% rename from src/locks/fs.js rename to packages/ipfs-repo/src/locks/fs.js diff --git a/src/locks/memory.js b/packages/ipfs-repo/src/locks/memory.js similarity index 100% rename from src/locks/memory.js rename to packages/ipfs-repo/src/locks/memory.js diff --git a/src/pinned-blockstore.js b/packages/ipfs-repo/src/pinned-blockstore.js similarity index 100% rename from src/pinned-blockstore.js rename to packages/ipfs-repo/src/pinned-blockstore.js diff --git a/src/pins.js b/packages/ipfs-repo/src/pins.js similarity index 100% rename from src/pins.js rename to packages/ipfs-repo/src/pins.js diff --git a/src/spec.js b/packages/ipfs-repo/src/spec.js similarity index 100% rename from src/spec.js rename to packages/ipfs-repo/src/spec.js diff --git a/src/types.d.ts b/packages/ipfs-repo/src/types.ts similarity index 100% rename from src/types.d.ts rename to packages/ipfs-repo/src/types.ts diff --git a/src/utils/blockstore.js b/packages/ipfs-repo/src/utils/blockstore.js similarity index 100% rename from src/utils/blockstore.js rename to packages/ipfs-repo/src/utils/blockstore.js diff --git a/src/utils/walk-dag.js b/packages/ipfs-repo/src/utils/walk-dag.js similarity index 100% rename from src/utils/walk-dag.js rename to packages/ipfs-repo/src/utils/walk-dag.js diff --git a/src/version.js b/packages/ipfs-repo/src/version.js similarity index 100% rename from src/version.js rename to packages/ipfs-repo/src/version.js diff --git a/test/api-addr-test.js 
b/packages/ipfs-repo/test/api-addr-test.js similarity index 100% rename from test/api-addr-test.js rename to packages/ipfs-repo/test/api-addr-test.js diff --git a/test/blockstore-test.js b/packages/ipfs-repo/test/blockstore-test.js similarity index 98% rename from test/blockstore-test.js rename to packages/ipfs-repo/test/blockstore-test.js index ad4607c7..144e47cb 100644 --- a/test/blockstore-test.js +++ b/packages/ipfs-repo/test/blockstore-test.js @@ -13,7 +13,7 @@ const first = require('it-first') const { fromString: uint8ArrayFromString } = require('uint8arrays/from-string') const { toString: uint8ArrayToString } = require('uint8arrays/to-string') const { equals: uint8ArrayEquals } = require('uint8arrays/equals') -const { BlockstoreAdapter } = require('interface-blockstore') +const { BaseBlockstore } = require('blockstore-core/base') const { sha256 } = require('multiformats/hashes/sha2') const { identity } = require('multiformats/hashes/identity') const raw = require('multiformats/codecs/raw') @@ -204,7 +204,7 @@ module.exports = (repo) => { const digest = await sha256.digest(data) const cid = CID.createV0(digest) - class ExplodingBlockStore extends BlockstoreAdapter { + class ExplodingBlockStore extends BaseBlockstore { /** * * @param {CID} c @@ -324,7 +324,7 @@ module.exports = (repo) => { const digest = await sha256.digest(data) const cid = CID.createV0(digest) - class ExplodingBlockStore extends BlockstoreAdapter { + class ExplodingBlockStore extends BaseBlockstore { /** * @param {CID} c */ diff --git a/test/blockstore-utils-test.js b/packages/ipfs-repo/test/blockstore-utils-test.js similarity index 100% rename from test/blockstore-utils-test.js rename to packages/ipfs-repo/test/blockstore-utils-test.js diff --git a/test/browser.js b/packages/ipfs-repo/test/browser.js similarity index 91% rename from test/browser.js rename to packages/ipfs-repo/test/browser.js index f6c0732f..82faa277 100644 --- a/test/browser.js +++ b/packages/ipfs-repo/test/browser.js @@ -4,8 +4,8 @@ const { createRepo } = require('../src') const loadCodec = require('./fixtures/load-codec') -const { MemoryDatastore } = require('interface-datastore') -const { MemoryBlockstore } = require('interface-blockstore') +const { MemoryDatastore } = require('datastore-core/memory') +const { MemoryBlockstore } = require('blockstore-core/memory') async function createTempRepo (options = {}) { const date = Date.now().toString() diff --git a/test/config-test.js b/packages/ipfs-repo/test/config-test.js similarity index 100% rename from test/config-test.js rename to packages/ipfs-repo/test/config-test.js diff --git a/test/datastore-test.js b/packages/ipfs-repo/test/datastore-test.js similarity index 100% rename from test/datastore-test.js rename to packages/ipfs-repo/test/datastore-test.js diff --git a/test/fixtures/create-backend.js b/packages/ipfs-repo/test/fixtures/create-backend.js similarity index 70% rename from test/fixtures/create-backend.js rename to packages/ipfs-repo/test/fixtures/create-backend.js index 7ceec596..474ee351 100644 --- a/test/fixtures/create-backend.js +++ b/packages/ipfs-repo/test/fixtures/create-backend.js @@ -1,7 +1,7 @@ 'use strict' -const { MemoryDatastore } = require('interface-datastore') -const BlockstoreDatastoreAdapter = require(('blockstore-datastore-adapter')) +const { MemoryDatastore } = require('datastore-core/memory') +const { BlockstoreDatastoreAdapter } = require('blockstore-datastore-adapter') function createBackend (overrides = {}) { return { diff --git a/test/fixtures/load-codec.js 
b/packages/ipfs-repo/test/fixtures/load-codec.js similarity index 100% rename from test/fixtures/load-codec.js rename to packages/ipfs-repo/test/fixtures/load-codec.js diff --git a/test/is-initialized.js b/packages/ipfs-repo/test/is-initialized.js similarity index 100% rename from test/is-initialized.js rename to packages/ipfs-repo/test/is-initialized.js diff --git a/test/keystore-test.js b/packages/ipfs-repo/test/keystore-test.js similarity index 100% rename from test/keystore-test.js rename to packages/ipfs-repo/test/keystore-test.js diff --git a/test/lock-test.js b/packages/ipfs-repo/test/lock-test.js similarity index 100% rename from test/lock-test.js rename to packages/ipfs-repo/test/lock-test.js diff --git a/test/migrations-test.js b/packages/ipfs-repo/test/migrations-test.js similarity index 100% rename from test/migrations-test.js rename to packages/ipfs-repo/test/migrations-test.js diff --git a/test/node.js b/packages/ipfs-repo/test/node.js similarity index 100% rename from test/node.js rename to packages/ipfs-repo/test/node.js diff --git a/test/options-test.js b/packages/ipfs-repo/test/options-test.js similarity index 100% rename from test/options-test.js rename to packages/ipfs-repo/test/options-test.js diff --git a/test/pins-test.js b/packages/ipfs-repo/test/pins-test.js similarity index 100% rename from test/pins-test.js rename to packages/ipfs-repo/test/pins-test.js diff --git a/test/repo-test.js b/packages/ipfs-repo/test/repo-test.js similarity index 98% rename from test/repo-test.js rename to packages/ipfs-repo/test/repo-test.js index 59135104..350bb194 100644 --- a/test/repo-test.js +++ b/packages/ipfs-repo/test/repo-test.js @@ -7,8 +7,8 @@ const tempDir = require('ipfs-utils/src/temp-dir') const { createRepo } = require('../') const Errors = require('../src/errors') const bytes = require('bytes') -const { Adapter, MemoryDatastore } = require('interface-datastore') -const { MemoryBlockstore } = require('interface-blockstore') +const { BaseDatastore, MemoryDatastore } = require('datastore-core') +const { MemoryBlockstore } = require('blockstore-core') const loadCodec = require('./fixtures/load-codec') const MemoryLock = require('../src/locks/memory') const createBackend = require('./fixtures/create-backend') @@ -198,7 +198,7 @@ module.exports = (repo) => { }) describe('locking', () => { - class ExplodingDatastore extends Adapter { + class ExplodingDatastore extends BaseDatastore { async open () { throw new Error('wat') } diff --git a/test/stat-test.js b/packages/ipfs-repo/test/stat-test.js similarity index 100% rename from test/stat-test.js rename to packages/ipfs-repo/test/stat-test.js diff --git a/tsconfig.json b/packages/ipfs-repo/tsconfig.json similarity index 100% rename from tsconfig.json rename to packages/ipfs-repo/tsconfig.json diff --git a/test/types.test-d.ts b/test/types.test-d.ts deleted file mode 100644 index d1edb342..00000000 --- a/test/types.test-d.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { expectType } from 'tsd' -import type { IPFSRepo } from '../' -import { createRepo } from '../' -import { MemoryDatastore } from 'interface-datastore' -import { MemoryBlockstore } from 'interface-blockstore' - -expectType(createRepo('', async () => ({ - name: '', - code: 0, - encode: () => new Uint8Array(), - decode: () => {} -}), { - root: new MemoryDatastore(), - blocks: new MemoryBlockstore(), - keys: new MemoryDatastore(), - pins: new MemoryDatastore(), - datastore: new MemoryDatastore() -})) diff --git a/types/just-range/index.d.ts b/types/just-range/index.d.ts 
deleted file mode 100644
index ceb1c6d2..00000000
--- a/types/just-range/index.d.ts
+++ /dev/null
@@ -1,3 +0,0 @@
-declare function range (start: any, stop?: any, step?: any): any[]
-
-export = range
diff --git a/types/merge-options/index.d.ts b/types/merge-options/index.d.ts
deleted file mode 100644
index e5c2d769..00000000
--- a/types/merge-options/index.d.ts
+++ /dev/null
@@ -1,2 +0,0 @@
-declare function mergeOptions<T1, T2> (arg1: T1, arg: T2): Required<T1 & T2>
-export = mergeOptions