Compare commits


No commits in common. "master" and "travis-node-update" have entirely different histories.

160 changed files with 40386 additions and 37484 deletions


@ -1 +0,0 @@
tests/dist/


@ -1,36 +0,0 @@
{
"env": {
"browser": true,
"commonjs": true,
"es6": true,
"mocha": true,
"node": true
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaVersion": 2017,
"sourceType": "module"
},
"rules": {
"indent": [
"error",
2
],
"linebreak-style": [
"error",
"unix"
],
"quotes": [
"error",
"single"
],
"semi": [
"error",
"always"
],
"eqeqeq": [
"error",
"always"
]
}
}

.gitattributes vendored (1 line changed)

@ -1 +0,0 @@
* text=auto eol=lf


@ -1,27 +0,0 @@
name: node-js-ci
on:
pull_request:
branches:
- master
push:
branches:
- master
jobs:
build:
runs-on: ${{matrix.os}}
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
node: ['14', '16']
name: Node ${{ matrix.node }} on ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- name: Test
uses: actions/setup-node@v2.4.1
with:
node-version: ${{ matrix.node }}
- run: npm install
- run: npm test

.gitignore vendored (12 lines changed)

@ -1,13 +1,5 @@
node_modules
bower_components
.env
*~
.vscode
.idea
# Parcel build dirs
.cache
tests/dist
# nyc code coverage
.nyc_output
coverage
dist/filer-issue225.js

.npmrc (1 line changed)

@ -1 +0,0 @@
loglevel=silent


@ -1,17 +0,0 @@
{
"hooks": {
"before:init": ["npm run test"],
"before:bump": ["npm run build"]
},
"git": {
"pushRepo": "git@github.com:filerjs/filer.git",
"tagName": "v${version}"
},
"npm": {
"publish": true
},
"github": {
"pushRepo": "git@github.com:filerjs/filer.git",
"release": true
}
}


@ -1,32 +1,9 @@
sudo: false
language: node_js
node_js:
- "lts/*"
cache:
directories:
- "node_modules"
os:
- linux
- osx
# Setup headless Firefox and Chrome support
# https://docs.travis-ci.com/user/gui-and-headless-browsers/#Using-the-Chrome-addon-in-the-headless-mode
env:
- MOZ_HEADLESS=1
addons:
chrome: stable
firefox: latest
before_install:
- google-chrome-stable --headless --disable-gpu --remote-debugging-port=9222 http://localhost &
after_success:
- npm install -g codecov
- npm run coverage
- codecov
- "8.11.3"
before_install: npm install -g grunt-cli
notifications:
email: false
irc: "irc.mozilla.org#filer"
env:
- "FILER_UPSTREAM_URI=\"default\" FILER_UPSTREAM_BRANCH=\"default\" FILER_UPSTREAM_REMOTE_NAME=\"default\""


@ -5,5 +5,3 @@ Barry Tulchinsky <barry.tulchinsky@gmail.com> (@btulchinsky)
Kieran Sedgwick <kieran.sedgwick@gmail.com> (@sedge)
Yoav Gurevich <ygurevich@ymail.com>
Gideon Thomas <r.gideonthomas@gmail.com>
Abdirahman Guled <aguled2@myseneca.ca>
Ben Heidemann <ben@heidemann.co.uk>


@ -9,67 +9,204 @@ message and I'll update it.
## Setup
To get a working build system do the following:
The Filer build system is based on [grunt](http://gruntjs.com/). To get a working build system
do the following:
```
npm install
npm install -g grunt-cli
```
Next, make sure you have installed Chrome and Firefox, which are needed for
running headless versions of the tests with `npm test`.
You can now run the following grunt tasks:
* `grunt jshint` will run [JSHint](http://www.jshint.com/) on your code (do this before submitting a pull request) to catch errors
* `grunt develop` will create a single file version of the library for testing in `dist/filer.js`
* `grunt release` like `develop` but will also create a minified version of the library in `dist/filer.min.js`
* `grunt test` or `grunt test-node` will run [JSHint](http://www.jshint.com/) on your code and the test suite in the context of `nodejs`
* `grunt test-browser` will run [JSHint](http://www.jshint.com/) and start a localhost server on port `1234`. Navigating to `localhost:1234/tests/index.html` will run the test suite in the context of the browser. **NOTE:** When finished, you will have to manually shut off the server by pressing `cmd/ctrl`+`c` in the same terminal session you ran `grunt test-browser`.
Once you've done some hacking and you'd like to have your work merged, you'll need to
make a pull request. If your patch includes code, make sure to check that all the
unit tests pass, including any new tests you wrote. Finally, make sure you add yourself
to the `AUTHORS` file.
### Releasing a new version
`grunt publish` will:
* Run the `grunt release` task
* Bump `bower.json` & `package.json` version numbers according to a [Semver](http://semver.org/) compatible scheme (see ["How to Publish"](#how-to-publish) below)
* Create a git tag at the new version number
* Create a release commit including `dist/filer.js`, `dist/filer.min.js`, `bower.json` and `package.json`
* Push tag & commit to `origin/develop`
* Update the `gh-pages` branch with the contents of the `develop` branch
* Force push the `gh-pages` branch to `origin/gh-pages`
* Publish the new version of the module to NPM
#### How to configure
1. Copy `env.sample` to `.env`
2. Modify as needed, or leave alone for defaults
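For reference, the variables this file controls are the ones used by the publish tasks and `.travis.yml`; the exact contents of `env.sample` may differ, and the values below are just the placeholders named there:
```
FILER_UPSTREAM_URI="default"
FILER_UPSTREAM_BRANCH="default"
FILER_UPSTREAM_REMOTE_NAME="default"
```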
#### How to Publish
`grunt publish` can be run in four ways:
1. `grunt publish` - does a patch (x.x.X) bump
2. `grunt publish:patch` - also does a patch (x.x.X) bump
3. `grunt publish:minor` - does a minor (x.X.x) bump
4. `grunt publish:major` - does a major (X.x.x) bump
The user *must* be on their local `develop` branch before running any form of `grunt publish`, or else the task will fail loudly.
## Tests
Tests are written using [Mocha](https://mochajs.org/) and [Chai](http://chaijs.com/api/bdd/).
There are a number of ways to run the tests. The preferred way is:
```
npm test
```
This will do a build, run the linting, start a server, and load the tests into
headless versions of Chrome and Firefox.
If you want more control over how tests are run, you can use other scripts:
* Linting is done via `npm run lint` or `npm run eslint`, both of which will run `eslint` on the `src` and `tests` directories. You can also use `npm run lint:fix` or `npm run eslint:fix`, which will run `eslint` with `--fix` on the `src` and `tests` directories, automatically fixing minor issues. Linting is run by default as part of `npm test`
* In headless versions of Chrome and Firefox using `npm test`. A report at the end will tell you what happened with each browser. Browser tests are preferred because they also test our providers (e.g., IndexedDB). They do take longer to run. You can also use `npm run karma-mocha-firefox` or `npm run karma-mocha-chrome` to run the tests in only one of the two headless browsers.
* In node.js using the Memory provider using `npm run test:node`. These run much faster, but don't run all tests (e.g., providers, watches).
* If you need to debug browser tests, or want to run them in a different browser, use `npm run test:manual`, which will start a server and you can point your browser to [http://localhost:1234](http://localhost:1234). Running the tests this way will also automatically watch your files, and hot-reload your code and tests, which is useful for debugging and trial/error testing.
* If you need to debug node.js test runs, you can do so using `npm run test:node-debug`. Then, open Chrome and browse to [chrome://inspect](chrome://inspect) and click on your tests in the inspector. The easiest way to get a breakpoint is to manually add a `debugger` keyword to your test code where you want the tests to stop.
> Tip: you can add `skip()` to any `it()` or `describe()` in Mocha to skip a test, or `only()` to have only that test run. For example: `describe.skip(...)` or `it.only(...)`.
* If you want to run migration tests separate from unit tests, use `npm run test:migrations`. Migration tests run at the end of a typical `npm test` run. If you need to create a new migration test, see [`tools/fs-image.js`](tools/fs-image.js) for details on how to generate a filesystem image, and [tests/filesystems/images/README.md](tests/filesystems/images/README.md) for more docs.
* If you want to manually generate coverage info for the tests, use `npm run coverage`. This is done automatically in Travis, so you shouldn't need to do it. You can see [https://codecov.io/gh/filerjs/filer](https://codecov.io/gh/filerjs/filer) for detailed reports.
Tests are written using [Mocha](http://visionmedia.github.io/mocha/) and [Chai](http://chaijs.com/api/bdd/).
You can run the tests in your browser by running `grunt test-browser` and opening the `tests` directory @ `http://localhost:1234/tests`, or in a nodejs context by running `grunt test`.
There are a number of configurable options for the test suite, which are set via query string params.
First, you can choose which filer source to use (i.e., src/, dist/filer-test.js, dist/filer.js or dist/filer.min.js). The default is to use what is in /dist/filer-test.js, and you can switch to other versions like so:
First, you can choose which filer source to use (i.e., src/, dist/filer-test.js, dist/filer.js or dist/filer.min.js).
The default is to use what is in /dist/filer-test.js, and you can switch to other versions like so:
* tests/index.html?filer-dist/filer.js
* tests/index.html?filer-dist/filer.min.js
* tests/index.html?filer-src/filer.js (from src)
Second, you can specify which provider to use for all non-provider specific tests (i.e., most of the tests).
The default provider is `Memory`, and you can switch it like so:
* tests/index.html?filer-provider=memory
* tests/index.html?filer-provider=indexeddb
* tests/index.html?filer-provider=websql
If you're writing tests, make sure you write them in the same style as existing tests, which are
provider agnostic. See [`tests/lib/test-utils.js`](tests/lib/test-utils.js) and how it gets used
in various tests as an example.
provider agnostic. See `tests/lib/test-utils.js` and how it gets used in various tests as
an example.
## Releases
## Communication
In order to perform a release, you'll need commit access to the main Filer repo,
as well as access to publish to Filer's npm module. To do a release:
If you'd like to talk to someone about the project, you can reach us on irc.mozilla.org in the #filer or #mofodev channel. Look for "ack" or "humph".
## Grunt tasks
The six grunt tasks Filer provides for development are detailed here, including a description of the third party tasks that are used to complete the process in the order they are used. For details on the grunt task running framework, see [http://gruntjs.com/](http://gruntjs.com/).
Individual targets are shown *in italics*:
### 1. grunt develop
*This task is responsible for producing the Filer distribution files.*
#### `browserify` ([https://www.npmjs.org/package/grunt-browserify](https://www.npmjs.org/package/grunt-browserify))
* *:filerDist*: Combines the filer source tree into a single distribution file for the bower releases.
#### `uglify` ([https://www.npmjs.com/package/grunt-contrib-uglify](https://www.npmjs.com/package/grunt-contrib-uglify))
* Adds a banner to ensure every release's distribution files are unique (**NOTE**: This is required for a successful `grunt release`)
### 2. grunt release
*This task runs the `grunt test`, and `grunt develop` tasks in one command, preventing new distribution files from being generated if tests fail.*
### 3. grunt build-tests
*This task generates single-file versions of the test suite and a separate Filer distribution file for testing Filer's compatibility with the [requirejs module system](http://requirejs.org/).*
#### `clean` ([https://www.npmjs.com/package/grunt-contrib-clean](https://www.npmjs.com/package/grunt-contrib-clean))
* Deletes the current browserified test files (see [http://browserify.org/](http://browserify.org/) for more details)
#### `browserify` ([https://www.npmjs.org/package/grunt-browserify](https://www.npmjs.org/package/grunt-browserify))
* *:filerPerf*: Combines performance tests into a single distribution file for browser performance benchmarks
* *:filerTest*: Combines unit tests into a single distribution file for testing Filer in the browser
* *:filerIssue225*: Used to generate a distribution file for testing based on the current state of the code, without affecting the current release's distribution file
### 4. grunt test-node
*This task lints and tests the Filer library in a nodejs context, aborting if any of the subtasks fail.*
#### `jshint` ([https://www.npmjs.com/package/grunt-contrib-jshint](https://www.npmjs.com/package/grunt-contrib-jshint))
* Used to lint the source files.
#### `browserify` ([https://www.npmjs.org/package/grunt-browserify](https://www.npmjs.org/package/grunt-browserify))
* *:filerIssue225*: Used to generate a distribution file for testing based on the current state of the code, without affecting the current release's distribution file
#### `shell` ([https://www.npmjs.com/package/grunt-shell](https://www.npmjs.com/package/grunt-shell))
* *:mocha*: Runs Filer's test suite on nodejs using the mocha test framework from the command line interface
### 5. grunt test-browser
*This task generates all the files necessary for running Filer's test suite in a browser, and starts a simple HTTP server to access the tests from your browser of choice*
#### `jshint` ([https://www.npmjs.com/package/grunt-contrib-jshint](https://www.npmjs.com/package/grunt-contrib-jshint))
* Used to lint the source files.
#### `build-tests` ([#3-grunt-build-tests](#3-grunt-build-tests))
* Generates single-file versions of the test suite and a separate Filer distribution file for testing Filer's compatibility with the [requirejs module system](http://requirejs.org/).
#### `connect` ([https://www.npmjs.com/package/grunt-contrib-connect](https://www.npmjs.com/package/grunt-contrib-connect))
* *:serverForBrowser*: Starts a simple HTTP server pointing at the root of the Filer directory. Browsing to the '/tests/' directory will run the Filer tests in the browser.
### 6. grunt publish
#### `prompt` ([https://www.npmjs.com/package/grunt-prompt](https://www.npmjs.com/package/grunt-prompt))
* *confirm*: Interactive prompt task, used to confirm the kind of version release being requested by the user, and to give them an opportunity to abort the release. The prompt message is generated in the `grunt publish` task itself.
#### `npm-checkbranch` ([https://github.com/sedge/grunt-npm/tree/branchcheck](https://github.com/sedge/grunt-npm/tree/branchcheck))
* Causes `grunt publish` to fail out early if the user is not on the `develop` branch
#### `release` ([#2-grunt-release](#2-grunt-release))
* Runs the `grunt test`, and `grunt develop` tasks in one command, preventing new distribution files from being generated if tests fail.
#### `bump` ([https://www.npmjs.com/package/grunt-bump](https://www.npmjs.com/package/grunt-bump))
* Responsible for creating the latest tag and release commit of the repo. In order, it:
1. Bumps the version number in Filer's `package.json` and `bower.json` files
2. Creates a release commit including updated manifest files and new filer distribution files
3. Tags the repo at this new version
4. Pushes the tag and the release commit upstream
#### `build-tests` ([#3-grunt-build-tests](#3-grunt-build-tests))
* Generates single-file versions of the test suite and a separate Filer distribution file for testing Filer's compatibility with the [requirejs module system](http://requirejs.org/).
#### `usebanner` ([https://www.npmjs.com/package/grunt-banner](https://www.npmjs.com/package/grunt-banner))
* *:publish*: Adds a banner to the generated test and performance test files. The banner contents are generated as part of the `grunt publish` task itself.
#### `gitadd` ([https://www.npmjs.com/package/grunt-git](https://www.npmjs.com/package/grunt-git))
* *:publish*: Adds the Filer test files to git's staging area to allow us to stash it in the next step
#### `gitstash` ([https://www.npmjs.com/package/grunt-git](https://www.npmjs.com/package/grunt-git))
* *:publish*: Stashes the Filer test files in preparation for switching to the `gh-pages` branch in the next step
#### `gitcheckout` ([https://www.npmjs.com/package/grunt-git](https://www.npmjs.com/package/grunt-git))
* *:publish*: Checks out the `gh-pages` branch to prepare for committing the newly generated test files in the next three steps
#### `gitrm` ([https://www.npmjs.com/package/grunt-git](https://www.npmjs.com/package/grunt-git))
* *:publish*: Equivalent of `git rm -f`, this task forces a removal of the existing versions of the generated test files on this branch in preparation for the next step.
#### `gitstash` ([https://www.npmjs.com/package/grunt-git](https://www.npmjs.com/package/grunt-git))
* *:pop*: Equivalent of `git stash pop`, this task reintroduces the staging area containing the newest version of the generated test files.
#### `gitcommit` ([https://www.npmjs.com/package/grunt-git](https://www.npmjs.com/package/grunt-git))
* *:publish*: This task commits the current staging area containing the newly generated test files. The commit message is generated during the `grunt publish` task itself.
#### `gitpush` ([https://www.npmjs.com/package/grunt-git](https://www.npmjs.com/package/grunt-git))
* *:publish*: This task pushes the local `gh-pages` branch to the remote specified in Filer's .env file as FILER_UPSTREAM_REMOTE_NAME.
#### `gitcheckout` ([https://www.npmjs.com/package/grunt-git](https://www.npmjs.com/package/grunt-git))
* *:revert*: This task checks out back to the main branch ('develop' by default, specified in the .env as FILER_UPSTREAM_BRANCH)
#### `npm-publish` ([https://www.npmjs.com/package/grunt-npm](https://www.npmjs.com/package/grunt-npm))
* Publishes the latest release to NPM
1. Make sure you have a .env file, with your `GITHUB_TOKEN` included. See [`env.sample`](env.sample) for more info on how to create one.
1. Login to the `npm` registry if you haven't already using `npm login`
1. Run `npm run release`. Releases are done interactively using [release-it](https://www.npmjs.com/package/release-it), and our config is defined in [`.release-it.json`](.release-it.json).


@ -1,4 +1,4 @@
Copyright (c) 2013 - 2019 Alan Kligman and the Filer contributors
Copyright (c) 2013, Alan Kligman
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

README.md (483 lines changed)

@ -1,26 +1,26 @@
[![NPM](https://nodei.co/npm/filer.png?downloads=true&stars=true)](https://nodei.co/npm/filer/)
[![Build Status](https://secure.travis-ci.org/filerjs/filer.png?branch=develop)](http://travis-ci.org/filerjs/filer) [![codecov](https://codecov.io/gh/filerjs/filer/branch/master/graph/badge.svg)](https://codecov.io/gh/filerjs/filer)
[![Build Status](https://secure.travis-ci.org/filerjs/filer.png?branch=develop)](http://travis-ci.org/filerjs/filer)
### Filer
Filer is a drop-in replacement for node's `fs` module, a POSIX-like file system
for browsers.
Filer is a POSIX-like file system interface for node.js and browser-based JavaScript.
### Compatibility
Filer uses [IndexedDB](https://developer.mozilla.org/en-US/docs/Web/API/IndexedDB_API)
and is [known to work in the following browsers/versions](https://caniuse.com/#feat=indexeddb):
Filer is known to work in the following browsers/versions, with the specified [Storage Providers](#providers):
* node.js: v0.10.*+
* IE: 10+
* Edge: 12+
* Firefox: 10+
* Chrome: 23+
* Safari: 10+
* Opera: 15+
* iOS: 10+
* Android Browser: 4.4+
* IE: 10+ (IndexedDB)
* Firefox: 26+ (IndexedDB)
* Chrome: 31+ (IndexedDB, WebSQL)
* Safari: 7.0+ (WebSQL)
* Opera: 19+ (IndexedDB, WebSQL)
* iOS: 3.2+ (WebSQL)
* Android Browser: 2.1-4.4 (WebSQL), 4.4+ (IndexedDB)
NOTE: if you're interested in maximum compatibility, use the `Fallback` provider instead of `Default`.
See the section on [Storage Providers](#providers).
### Contributing
@ -30,15 +30,16 @@ Want to join the fun? We'd love to have you! See [CONTRIBUTING](https://github.c
Filer can be obtained in a number of ways:
1. Via npm: `npm install filer`
1. Via unpkg: `<script src="https://unpkg.com/filer"></script>` or specify a version directly, for example: [https://unpkg.com/filer@1.0.1/dist/filer.min.js](https://unpkg.com/filer@1.0.1/dist/filer.min.js)
1. npm - `npm install filer`
2. bower - `bower install filer`
3. download pre-built versions: [filer.js](https://raw.github.com/filerjs/filer/develop/dist/filer.js), [filer.min.js](https://raw.github.com/filerjs/filer/develop/dist/filer.min.js)
### Loading and Usage
Filer is built as a UMD module and can therefore be loaded as a CommonJS or AMD module, or used via the global.
```javascript
// Option 1: Filer loaded via require()
// Option 1: Filer loaded via require() in node/browserify
var Filer = require('filer');
// Option 2: Filer loaded via RequireJS
@ -54,84 +55,13 @@ requirejs(['filer'], function(Filer) {...}
var Filer = window.Filer;
```
### Webpack Plugin
Filer can be used as a drop-in replacement for the node.js [fs](http://nodejs.org/api/fs.html) and
[path](http://nodejs.org/api/path.html) modules. For convenience, filer provides a webpack plugin which
will shim the desired node.js functionality. This plugin can be used by inserting the following into
your webpack config:
```javascript
// webpack.config.js
var { FilerWebpackPlugin } = require('filer/webpack');
module.exports = {
plugins: [
new FilerWebpackPlugin(),
],
}
```
---
**NOTE**
Previously it was recommended to access the `FilerWebpackPlugin` class by importing the main filer module. This was deprecated due to [this issue](https://github.com/filerjs/filer/issues/790). For anyone using ***filer version 1.4.0 or earlier***, please import the plugin class like this:
```javascript
var FilerWebpackPlugin = require('filer/src/webpack-plugin');
```
---
You can then import the node.js [fs](http://nodejs.org/api/fs.html) and [path](http://nodejs.org/api/path.html)
modules as normal and `FilerWebpackPlugin` will ensure that webpack will resolve references to these modules to
the appropriate filer shims. You will then be able to use these modules as normal (with the exception of the
synchronous fs methods e.g. `mkdirSync()`).
```javascript
import fs from 'fs';
import path from 'path';
```
The filer webpack plugin will, by default, shim the [fs](http://nodejs.org/api/fs.html) and
[path](http://nodejs.org/api/path.html) modules. However, its behaviour can be customised by passing an
options object.
```javascript
// webpack.config.js
module.exports = {
plugins: [
new FilerWebpackPlugin({
// Options
}),
],
}
```
The following options can be passed to the filer webpack plugin:
| Option | Type | Optional | Default | Description |
|---------------|---------|----------|--------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------|
| filerDir | string | yes | '\<rootDir\>/node_modules/filer' | The directory in which filer is installed. |
| shimsDir | string | yes | '\<rootDir\>/node_modules/filer/shims' | The directory in which the shims are installed. |
| fsProviderDir | string | yes | '\<rootDir\>/node_modules/filer/shims/providers' | The directory in which the file system provider shims are located. This option is required when using a custom provider. |
| shimFs | boolean | yes | true | Should the fs module be shimmed. |
| shimPath | boolean | yes | true | Should the path module be shimmed. |
| fsProvider | string | yes | 'default' | The file system provider to use. Should be one of 'default', 'indexeddb', 'memory', 'custom'. The 'default' option is equivalent to 'indexeddb'. |
NOTE: '\<rootDir\>' will be resolved to the current working directory.
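For instance, a config that keeps the `fs` shim, skips the `path` shim, and uses the in-memory provider could look like this (an illustrative sketch built from the options above, not an official recipe):
```javascript
// webpack.config.js
var { FilerWebpackPlugin } = require('filer/webpack');

module.exports = {
  plugins: [
    new FilerWebpackPlugin({
      shimFs: true,        // shim the fs module (the default)
      shimPath: false,     // do not shim the path module
      fsProvider: 'memory' // back the fs shim with the Memory provider
    }),
  ],
};
```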
Though filer also exposes the Buffer object, it is left up to the user to shim this as appropriate. This is because filer offers
no custom implementation. Currently, filer uses the [node-libs-browser](https://github.com/webpack/node-libs-browser) Buffer implementation
internally, though any faithful implementation of the [node.js Buffer object](http://nodejs.org/api/buffer.html) should play nicely
with filer.
### Getting Started
Filer is as close to the node.js [fs module](http://nodejs.org/api/fs.html) as possible,
with the following differences:
* No synchronous versions of methods (e.g., `mkdir()` but not `mkdirSync()`).
* No permissions (e.g., no `chown()`, `chmod()`, etc.).
* No support for stream-based operations (e.g., `fs.ReadStream`, `fs.WriteStream`).
Filer has other features lacking in node.js (e.g., swappable backend
@ -148,32 +78,18 @@ they are invoked. Ensure proper ordering by chaining operations in callbacks.
To create a new file system or open an existing one, create a new `FileSystem`
instance. By default, a new [IndexedDB](https://developer.mozilla.org/en/docs/IndexedDB)
database is created for each file system. The file system can also use other
backend storage providers, for example `Memory`. See the section on [Storage Providers](#providers).
backend storage providers, for example [WebSQL](http://en.wikipedia.org/wiki/Web_SQL_Database)
or even RAM (i.e., for temporary storage). See the section on [Storage Providers](#providers).
<a name="overviewExample"></a>
```js
const { fs, path } = require('filer');
fs.mkdir('/docs', (err) => {
if (err) {
return console.error('Unable to create /docs dir', err);
}
const filename = path.join('/docs', 'first.txt');
const data = 'Hello World!\n';
fs.writeFile(filename, data, (err) => {
if (err) {
return console.error('Unable to write /docs/first.txt', err);
}
fs.stat(filename, (err, stats) => {
if (err) {
return console.error('Unable to stat /docs/first.txt', err);
}
console.log('Stats for /docs/first.txt:', stats);
```javascript
var fs = new Filer.FileSystem();
fs.open('/myfile', 'w+', function(err, fd) {
if (err) throw err;
fs.close(fd, function(err) {
if (err) throw err;
fs.stat('/myfile', function(err, stats) {
if (err) throw err;
console.log('stats: ' + JSON.stringify(stats));
});
});
});
@ -182,7 +98,7 @@ fs.mkdir('/docs', (err) => {
For a complete list of `FileSystem` methods and examples, see the [FileSystem Instance Methods](#FileSystemMethods)
section below.
Filer also includes node's `path` and `Buffer` modules. See the [Filer.Path](#FilerPath) and [Filer.Buffer](#FilerBuffer) sections below.
Filer also supports node's Path module. See the [Filer.Path](#FilerPath) section below.
In addition, common shell operations (e.g., rm, touch, cat, etc.) are supported via the
`FileSystemShell` object, which can be obtained from, and used with a `FileSystem`.
@ -194,29 +110,11 @@ Like node.js, callbacks for methods that accept them are optional but suggested
you omit the callback, errors will be thrown as exceptions). The first callback parameter is
reserved for passing errors. It will be `null` if no errors occurred and should always be checked.
#### Support for Promises
The Promise based API mimics the way node [implements](https://nodejs.org/api/fs.html#fs_fs_promises_api) them. Both `Shell` and `FileSystem` now have a `promises` property, which gives access to Promise based versions of methods in addition to the regular callback style methods. Method names are identical to their callback counterparts with the difference that instead of receiving a final argument as a callback, they return a Promise that is resolved or rejected based on the success of method execution.
See example below:
```javascript
const fs = new Filer.FileSystem().promises;
fs.writeFile('/myfile', 'some data')
.then(() => fs.stat('/myfile'))
.then(stats => { console.log(`stats: ${JSON.stringify(stats)}`); })
.catch(err => { console.error(err); });
```
#### Filer.FileSystem(options, callback) constructor
In most cases, using `Filer.fs` will be sufficient and will provide a working filesystem.
However, if you need more control over the filesystem, you can also use the `FileSystem`
constructor, invoked to open an existing file system or create a new one.
`Filer.FileSystem()` accepts two arguments: an `options` object, and an optional
`callback` function. The `options` object can specify a number of optional arguments,
including:
File system constructor, invoked to open an existing file system or create a new one.
Accepts two arguments: an `options` object, and an optional `callback`. The `options`
object can specify a number of optional arguments, including:
* `name`: the name of the file system, defaults to `'local'`
* `flags`: an Array of one or more flags to use when creating/opening the file system:
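For example, a named file system could be opened like this (a minimal sketch: the name `'my-filesystem'` is arbitrary, and `FORMAT`, which forces a fresh file system, is used here only as an illustration of a flag):
```javascript
var fs = new Filer.FileSystem({
  name: 'my-filesystem',
  flags: ['FORMAT']
}, function(err, fs) {
  if (err) throw err;
  // the file system is formatted and ready to use here
});
```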
@ -251,17 +149,15 @@ it becomes ready.
#### Filer.FileSystem.providers - Storage Providers<a name="providers"></a>
Filer can be configured to use a number of different storage providers. The provider object encapsulates all aspects of data access, making it possible to swap in different backend storage options. There are currently 2 providers to choose from:
Filer can be configured to use a number of different storage providers. The provider object encapsulates all aspects
of data access, making it possible to swap in different backend storage options. There are currently 4 different
providers to choose from:
* `FileSystem.providers.IndexedDB()` - uses IndexedDB
* `FileSystem.providers.WebSQL()` - uses WebSQL
* `FileSystem.providers.Fallback()` - attempts to use IndexedDB if possible, falling-back to WebSQL if necessary
* `FileSystem.providers.Memory()` - uses memory (not suitable for data that needs to survive the current session)
**NOTE**: previous versions of Filer also supported `FileSystem.providers.WebSQL()` and
`FileSystem.providers.Fallback()`, which could be used in browsers that supported
WebSQL but not IndexedDB. [WebSQL has been deprecated](https://www.w3.org/TR/webdatabase/),
and this functionality was removed in `v1.0.0`. If for some reason you still need it, use [`v0.0.44`](https://github.com/filerjs/filer/releases/tag/v0.0.44).
You can choose your provider when creating a `FileSystem`:
```javascript
@ -271,20 +167,28 @@ var providers = FileSystem.providers;
// Example 1: Use the default provider (currently IndexedDB)
var fs1 = new FileSystem();
// Example 2: Use the Memory provider
var fs2 = new FileSystem({ provider: new providers.Memory() });
// Example 2: Explicitly use IndexedDB
var fs2 = new FileSystem({ provider: new providers.IndexedDB() });
// Example 3: Use one of IndexedDB or WebSQL, whichever is supported
var fs3 = new FileSystem({ provider: new providers.Fallback() });
```
Every provider has an `isSupported()` method, which returns `true` if the browser supports this provider:
```javascript
if( Filer.FileSystem.providers.IndexedDB.isSupported() ) {
// IndexedDB provider will work in current environment...
if( Filer.FileSystem.providers.WebSQL.isSupported() ) {
// WebSQL provider will work in current environment...
}
```
You can also write your own provider if you need a different backend. See the code in `src/providers` for details.
A number of other providers have been written, including:
* node.js fs provider: https://github.com/humphd/filer-fs
* node.js Amazon S3 provider: https://github.com/alicoding/filer-s3
#### Filer.Buffer<a name="FilerBuffer"></a>
When reading and writing data, Filer follows node.js and uses [`Buffer`](http://nodejs.org/api/buffer.html).
@ -292,41 +196,16 @@ When in a node.js environment, native `Buffer`s can be used, or Filer.Buffer, wh
to node's `Buffer`. In a browser, you can also use `Filer.Buffer`.
NOTE: a `Filer.Buffer` in a browser is really an augmented `Uint8Array` (i.e., the node `Buffer` api
methods are added to the instance). See https://github.com/feross/buffer for more details.
NOTE: `Filer.Buffer` currently includes the older, deprecated [constructor functions](https://nodejs.org/api/buffer.html#buffer_new_buffer_array), but these will be removed
at some point. You are encouraged to switch to use the newer class methods `Buffer.from()`
and `Buffer.alloc()`. See the [node.js Buffer docs](https://nodejs.org/api/buffer.html).
```js
/* Deprecated - see https://nodejs.org/api/buffer.html#buffer_new_buffer_array */
new Buffer(array)
new Buffer(arrayBuffer[, byteOffset[, length]])
new Buffer(buffer)
new Buffer(string[, encoding])
new Buffer(size)
/* Use Instead */
Buffer.from(array)
Buffer.from(arrayBuffer[, byteOffset[, length]])
Buffer.from(buffer)
Buffer.from(string[, encoding])
Buffer.alloc(size)
Buffer.allocUnsafe(size)
```
methods are added to the instance). See https://github.com/feross/buffer for more details. Additionally, unlike native `Buffer`, `Filer.Buffer`'s constructor can accept `ArrayBuffer` objects, which will be interpreted as `Uint8Array`s.
#### Filer.Path<a name="FilerPath"></a>
The node.js [path module](http://nodejs.org/api/path.html) is available via `Filer.path` or
`Filer.Path` (both are supported for historical reasons, and to match node). The Filer `path`
module is identical to the node.js version (see [https://github.com/browserify/path-browserify](https://github.com/browserify/path-browserify)), with the following differences:
* The CWD always defaults to `/`
* No support for Windows style paths (assume you are on a POSIX system)
* Additional utility methods (see below)
The node.js [path module](http://nodejs.org/api/path.html) is available via the `Filer.Path` object. It is
identical to the node.js version with the following differences:
* No notion of a current working directory in `resolve` (the root dir is used instead)
```javascript
var path = Filer.path;
var path = Filer.Path;
var dir = path.dirname('/foo/bar/baz/asdf/quux');
// dir is now '/foo/bar/baz/asdf'
@ -341,37 +220,16 @@ var newpath = path.join('/foo', 'bar', 'baz/asdf', 'quux', '..');
```
For more info see the docs in the [path module](http://nodejs.org/api/path.html) for a particular method:
* `path.normalize(p)` - NOTE: Filer.Path.normalize does *not* add a trailing slash
* `path.normalize(p)`
* `path.join([path1], [path2], [...])`
* `path.resolve([from ...], to)`
* `path.relative(from, to)`
* `path.dirname(p)`
* `path.basename(p, [ext])` - NOTE: Filer.Path.basename will return `'/'` vs. `''`
* `path.basename(p, [ext])`
* `path.extname(p)`
* `path.sep`
* `path.delimiter`
Filer.Path also includes the following extra methods:
* `isNull(p)` returns `true` or `false` if the path contains a null character (`'\u0000'`)
* `addTrailing(p)` returns the path `p` with a single trailing slash added
* `removeTrailing(p)` returns the path `p` with trailing slash(es) removed
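A quick sketch of the differences and extra helpers noted above (return values are illustrative but follow the descriptions):
```javascript
var path = Filer.path;

path.normalize('/docs/files/');       // '/docs/files' (no trailing slash added)
path.basename('/');                   // '/' (node.js returns '')
path.isNull('/docs/first\u0000.txt'); // true, the path contains a null character
path.addTrailing('/docs');            // '/docs/'
path.removeTrailing('/docs///');      // '/docs'
```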
[As with node.js](https://nodejs.org/api/fs.html#fs_file_paths), all methods below that
accept a `path` argument as a `String` can also take a [`file://` URL](https://nodejs.org/api/fs.html#fs_url_object_support)
or a `Buffer`. For example, all of the following cases will work the same way with Filer:
```js
// 1. path as a String
fs.writeFile('/dir/file.txt', 'data', function(err) {...});
// 2. path as a URL
fs.writeFile(new URL('file:///dir/file.txt'), 'data', function(err) {...});
// 3. path as a Buffer
fs.writeFile(Buffer.from('/dir/file.txt'), 'data', function(err) {...});
```
#### Filer.Errors<a name="Errors"></a>
The error objects used internally by Filer are also exposed via the `Filer.Errors` object. As much as possible
@ -414,11 +272,7 @@ Once a `FileSystem` is created, it has the following methods. NOTE: code example
a `FileSystem` instance named `fs` has been created like so:
```javascript
// 1. Using Filer.fs for a default filesystem
const { fs } = require('filer');
// 2. Or via the FileSystem constructor with specified options
const fs = new Filer.FileSystem(options, callback);
var fs = new Filer.FileSystem();
```
* [fs.rename(oldPath, newPath, callback)](#rename)
@ -436,16 +290,10 @@ const fs = new Filer.FileSystem(options, callback);
* [fs.mknod(path, mode, callback)](#mknod)
* [fs.rmdir(path, callback)](#rmdir)
* [fs.mkdir(path, [mode], callback)](#mkdir)
* [fs.access(path, [mode], callback)](#access)
* [fs.mkdtemp(path, [options], callback)](#mkdtemp)
* [fs.readdir(path, callback)](#readdir)
* [fs.close(fd, callback)](#close)
* [fs.open(path, flags, [mode], callback)](#open)
* [fs.utimes(path, atime, mtime, callback)](#utimes)
* [fs.chown(path, uid, gid, callback)](#chown)
* [fs.fchown(fd, uid, gid, callback)](#fchown)
* [fs.chmod(path, mode, callback)](#chmod)
* [fs.fchmod(fd, mode, callback)](#fchmod)
* [fs.futimes(fd, atime, mtime, callback)](#fsutimes)
* [fs.fsync(fd, callback)](#fsync)
* [fs.write(fd, buffer, offset, length, position, callback)](#write)
@ -487,7 +335,7 @@ Example:
```javascript
// Create a file, shrink it, expand it.
var buffer = Filer.Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]);
var buffer = new Filer.Buffer([1, 2, 3, 4, 5, 6, 7, 8]);
fs.open('/myfile', 'w', function(err, fd) {
if(err) throw err;
@ -518,7 +366,7 @@ Example:
```javascript
// Create a file, shrink it, expand it.
var buffer = Filer.Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]);
var buffer = new Filer.Buffer([1, 2, 3, 4, 5, 6, 7, 8]);
fs.open('/myfile', 'w', function(err, fd) {
if(err) throw err;
@ -551,20 +399,12 @@ Callback gets `(error, stats)`, where `stats` is an object with the following pr
{
node: <string> // internal node id (unique)
dev: <string> // file system name
name: <string> // the entry's name (basename)
size: <number> // file size in bytes
nlinks: <number> // number of links
atime: <date> // last access time as JS Date Object
mtime: <date> // last modified time as JS Date Object
ctime: <date> // creation time as JS Date Object
atimeMs: <number> // last access time as Unix Timestamp
mtimeMs: <number> // last modified time as Unix Timestamp
ctimeMs: <number> // creation time as Unix Timestamp
type: <string> // file type (FILE, DIRECTORY, SYMLINK),
gid: <number> // group id
uid: <number> // owner user id
mode: <number> // permissions
version: <number> // version of the node
atime: <number> // last access time
mtime: <number> // last modified time
ctime: <number> // creation time
type: <string> // file type (FILE, DIRECTORY, SYMLINK)
}
```
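For example, a minimal sketch that stats a path and inspects a few of these fields:
```javascript
fs.stat('/docs', function(err, stats) {
  if (err) throw err;
  if (stats.type === 'DIRECTORY') {
    console.log(stats.name + ' is a directory, last modified ' + stats.mtime);
  }
});
```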
@ -696,12 +536,10 @@ Create a symbolic link to the file at `dstPath` containing the path `srcPath`. A
Symbolic links are files that point to other paths.
NOTE: Filer allows for, but ignores the optional `type` parameter used in node.js.
The `srcPath` may be a relative path, which will be resolved relative to `dstPath`
Example:
```javascript
// Absolute path
fs.symlink('/logs/august.log', '/logs/current', function(err) {
if(err) throw err;
fs.readFile('/logs/current', 'utf8', function(err, data) {
@ -709,21 +547,11 @@ fs.symlink('/logs/august.log', '/logs/current', function(err) {
var currentLog = data;
});
});
// Relative path
fs.symlink('../file', '/dir/symlink', function(err) {
if(err) throw err;
// The /dir/symlink file is now a symlink to /file
});
```
#### fs.readlink(path, callback)<a name="readlink"></a>
Reads the contents of a symbolic link. Asynchronous [readlink(2)](http://pubs.opengroup.org/onlinepubs/009695399/functions/readlink.html).
Callback gets `(error, linkContents)`, where `linkContents` is a string
containing the symbolic link's link path. If the original `srcPath` given
to `symlink()` was a relative path, it will be fully resolved relative
to `dstPath` when returned by `readlink()`.
Reads the contents of a symbolic link. Asynchronous [readlink(2)](http://pubs.opengroup.org/onlinepubs/009695399/functions/readlink.html). Callback gets `(error, linkContents)`, where `linkContents` is a string containing the symbolic link's link path.
Example:
@ -820,47 +648,7 @@ fs.mkdir('/home', function(err) {
});
```
#### fs.access(path, [mode], callback)<a name="access"></a>
Tests a user's permissions for the file or directory supplied in `path` argument. Asynchronous [access(2)](http://pubs.opengroup.org/onlinepubs/009695399/functions/access.html). Callback gets no additional arguments. The `mode` argument can be one of the following (constants are available on `fs.constants` and `fs`):
* `F_OK`: Test for existence of file.
* `R_OK`: Test whether the file exists and grants read permission.
* `W_OK`: Test whether the file exists and grants write permission.
* `X_OK`: Test whether the file exists and grants execute permission.
NOTE: you can also create a mask consisting of the bitwise OR of two or more values (e.g. `fs.constants.W_OK | fs.constants.R_OK`).
Example:
```javascript
// Check if the file exists in the current directory.
fs.access(file, fs.F_OK, function(err) {
console.log(`${file} ${err ? 'does not exist' : 'exists'}`);
});
```
#### fs.mkdtemp(prefix, options, callback)<a name="mkdtemp"></a>
Makes a temporary directory with the prefix supplied in the `prefix` argument. Method will append six random characters directly to the prefix. Asynchronous. Callback gets `(error, path)`, where `path` is the path to the created directory.
NOTE: Filer allows for, but ignores the optional `options` argument used in node.js.
Example:
```javascript
// Create tmp directory with prefix foo
fs.mkdtemp("/foo-", function (error, path) {
// A new folder foo-xxxxxx will be created. `path` contains the path to the created folder.
});
fs.mkdtemp("/myDir/tmp", function (error, path) {
// Will create a new folder tmpxxxxxx inside the myDir directory.
// Will pass an error if myDir does not exist.
});
```
#### fs.readdir(path, [options], callback)<a name="readdir"></a>
#### fs.readdir(path, callback)<a name="readdir"></a>
Reads the contents of a directory. Asynchronous [readdir(3)](http://pubs.opengroup.org/onlinepubs/009695399/functions/readdir.html).
Callback gets `(error, files)`, where `files` is an array containing the names of each directory entry (i.e., file, directory, link) in the directory, excluding `.` and `..`.
@ -881,12 +669,6 @@ fs.readdir('/docs', function(err, files) {
});
```
Optionally accepts an options parameter, which can be either an encoding (e.g. "utf8") or an object with optional properties `encoding` and `withFileTypes`.
The `encoding` property is a `string` which will determine the character encoding to use for the names of each directory entry. The `withFileTypes` property is a `boolean` which defaults to `false`. If `true`, this method will return an array of [fs.Dirent](https://nodejs.org/api/fs.html#fs_class_fs_dirent) objects.
The `name` property on the [fs.Dirent](https://nodejs.org/api/fs.html#fs_class_fs_dirent) objects will be encoded using the specified character encoding.
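A sketch of the `withFileTypes` form (assuming the returned objects expose node's `Dirent` helpers such as `isDirectory()`):
```javascript
fs.readdir('/docs', { withFileTypes: true }, function(err, entries) {
  if (err) throw err;
  entries.forEach(function(entry) {
    // entry.name is the entry's name; helper methods describe its type
    console.log(entry.name, entry.isDirectory() ? '(directory)' : '(file)');
  });
});
```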
#### fs.close(fd, callback)<a name="close"></a>
Closes a file descriptor. Asynchronous [close(2)](http://pubs.opengroup.org/onlinepubs/009695399/functions/close.html).
@ -934,7 +716,7 @@ fs.open('/myfile', 'w', function(err, fd) {
#### fs.utimes(path, atime, mtime, callback)<a name="utimes"></a>
Changes the file timestamps for the file given at path `path`. Asynchronous [utimes(2)](http://pubs.opengroup.org/onlinepubs/009695399/functions/utimes.html). Callback gets no additional arguments. Both `atime` (access time) and `mtime` (modified time) arguments should be a JavaScript Date or Number.
Changes the file timestamps for the file given at path `path`. Asynchronous [utimes(2)](http://pubs.opengroup.org/onlinepubs/009695399/functions/utimes.html). Callback gets no additional arguments. Both `atime` (access time) and `mtime` (modified time) arguments should be a JavaScript Date.
Example:
@ -948,7 +730,7 @@ fs.utimes('/myfile.txt', now, now, function(err) {
#### fs.futimes(fd, atime, mtime, callback)<a name="futimes"></a>
Changes the file timestamps for the open file represented by the file descriptor `fd`. Asynchronous [utimes(2)](http://pubs.opengroup.org/onlinepubs/009695399/functions/utimes.html). Callback gets no additional arguments. Both `atime` (access time) and `mtime` (modified time) arguments should be a JavaScript Date or Number.
Changes the file timestamps for the open file represented by the file descriptor `fd`. Asynchronous [utimes(2)](http://pubs.opengroup.org/onlinepubs/009695399/functions/utimes.html). Callback gets no additional arguments. Both `atime` (access time) and `mtime` (modified time) arguments should be a JavaScript Date.
Example:
@ -967,107 +749,9 @@ fs.open('/myfile.txt', function(err, fd) {
});
```
#### fs.chown(path, uid, gid, callback)<a name="chown"></a>
Changes the owner and group of a file. Asynchronous [chown(2)](http://pubs.opengroup.org/onlinepubs/009695399/functions/chown.html). Callback gets no additional arguments. Both `uid` (user id) and `gid` (group id) arguments should be a JavaScript Number. By default, `0x0` is used (i.e., `root:root` ownership).
Example:
```javascript
fs.chown('/myfile.txt', 500, 500, function(err) {
if(err) throw err;
// /myfile.txt is now owned by user with id 500, group 500
});
```
#### fs.fchown(fd, uid, gid, callback)<a name="fchown"></a>
Changes the owner and group of a file. Asynchronous [chown(2)](http://pubs.opengroup.org/onlinepubs/009695399/functions/chown.html). Callback gets no additional arguments. Both `uid` (user id) and `gid` (group id) arguments should be a JavaScript Number. By default, `0x0` is used (i.e., `root:root` ownership).
Example:
```javascript
fs.open('/myfile.txt', function(err, fd) {
if(err) throw err;
fs.fchown(fd, 500, 500, function(err) {
if(err) throw err;
// /myfile.txt is now owned by user with id 500, group 500
fs.close(fd);
});
});
```
#### fs.chmod(path, mode, callback)<a name="chmod"></a>
Changes the mode of a file. Asynchronous [chmod(2)](http://pubs.opengroup.org/onlinepubs/009695399/functions/chmod.html). Callback gets no additional arguments. The `mode` argument should be a JavaScript Number, which combines file type and permission information. Here is a list of common values useful for setting the `mode`:
* File type `S_IFREG=0x8000`
* Dir type `S_IFDIR=0x4000`
* Link type `S_IFLNK=0xA000`
* Permissions `755=0x1ED`
* Permissions `644=0x1A4`
* Permissions `777=0x1FF`
* Permissions `666=0x1B6`
By default, directories use `(0x4000 | 0x1ED)` and files use `(0x8000 | 0x1A4)`.
Example:
```javascript
// S_IFREG | 0o777
var mode = 0x8000 | 0x1FF
fs.chmod('/myfile.txt', mode, function(err) {
if(err) throw err;
// /myfile.txt is a regular file with permissions 777
});
```
#### fs.fchmod(fd, mode, callback)<a name="fchmod"></a>
Changes the mode of a file. Asynchronous [chmod(2)](http://pubs.opengroup.org/onlinepubs/009695399/functions/chmod.html). Callback gets no additional arguments. The `mode` argument should be a JavaScript Number, which combines file type and permission information. By default, `755` (dir) and `644` (file) are used.
Example:
```javascript
fs.open('/myfile.txt', function(err, fd) {
if(err) throw err;
// S_IFREG | 0o777
var mode = 0x8000 | 0x1FF
fs.fchmod(fd, mode, function(err) {
if(err) throw err;
// /myfile.txt is a regular file with permissions 777
fs.close(fd);
});
});
```
#### fs.fsync(fd, callback)<a name="fsync"></a>
Synchronize the data and metadata for the file referred to by `fd` to disk.
Asynchronous [fsync(2)](http://man7.org/linux/man-pages/man2/fsync.2.html).
The callback gets `(error)`.
```js
fs.open('/myfile', 'r', function(err, fd) {
if(err) throw err;
// Use fd, then sync
fs.fsync(fd, function(err) {
if(err) throw err;
fs.close(fd);
});
});
```
NOTE: Not yet implemented, see https://github.com/filerjs/filer/issues/87
#### fs.write(fd, buffer, offset, length, position, callback)<a name="write"></a>
@ -1079,7 +763,7 @@ Example:
```javascript
// Create a file with the following bytes.
var buffer = Filer.Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]);
var buffer = new Filer.Buffer([1, 2, 3, 4, 5, 6, 7, 8]);
fs.open('/myfile', 'w', function(err, fd) {
if(err) throw err;
@ -1116,22 +800,22 @@ Example:
```javascript
fs.open('/myfile', 'r', function(err, fd) {
if(err) throw err;
if(err) throw error;
// Determine size of file
fs.fstat(fd, function(err, stats) {
if(err) throw err;
if(err) throw error;
// Create a buffer large enough to hold the file's contents
var nbytes = expected = stats.size;
var buffer = Filer.Buffer.alloc(nbytes);
var buffer = new Filer.Buffer(nbytes);
var read = 0;
function readBytes(offset, position, length) {
length = length || buffer.length - read;
fs.read(fd, buffer, offset, length, position, function(err, nbytes) {
if(err) throw err;
if(err) throw error;
// nbytes is now the number of bytes read, between 0 and buffer.length.
// See if we still have more bytes to read.
@ -1182,7 +866,7 @@ fs.writeFile('/myfile.txt', "...data...", function (err) {
});
// Write binary file
var buffer = Filer.Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]);
var buffer = new Filer.Buffer([1, 2, 3, 4, 5, 6, 7, 8]);
fs.writeFile('/myfile', buffer, function (err) {
if (err) throw err;
});
@ -1205,8 +889,8 @@ fs.appendFile('/myfile.txt', "Data...", function (err) {
// '/myfile.txt' would now read out 'More...Data...'
// Append binary file
var data = Filer.Buffer.from([1, 2, 3, 4]);
var more = Filer.Buffer.from([5, 6, 7, 8]);
var data = new Filer.Buffer([1, 2, 3, 4]);
var more = new Filer.Buffer([5, 6, 7, 8]);
fs.writeFile('/myfile', data, function (err) {
if (err) throw err;
@ -1579,8 +1263,17 @@ sh.find('/app/user', {
#### sh.ls(dir, [options], callback)<a name="ls"></a>
Get the listing of a directory, returning an array of directory entries
in the same form as [fs.stat()](#stat), with the exception that a new Array named
`contents` is added for directory entries, containing child entries.
in the following form:
```
{
path: <String> the basename of the directory entry
links: <Number> the number of links to the entry
size: <Number> the size in bytes of the entry
modified: <Number> the last modified date/time
type: <String> the type of the entry
contents: <Array> an optional array of child entries, if this entry is itself a directory
}
```
By default `sh.ls()` gives a shallow listing. If you want to follow
directories as they are encountered, use the `recursive=true` option. NOTE:

bower.json Normal file (15 lines changed)

@ -0,0 +1,15 @@
{
"name": "filer",
"version": "0.0.44",
"main": "dist/filer.js",
"ignore": [
"build",
"examples",
"package.json",
"tests",
"gruntfile.js",
"node_modules",
"src",
"tools"
]
}

config/environment.js Normal file (5 lines changed)

@ -0,0 +1,5 @@
module.exports = (function() {
var habitat = require('habitat');
habitat.load();
return new habitat();
})();

dist/buffer.js vendored Normal file (1692 lines changed)

File diff suppressed because it is too large

dist/buffer.min.js vendored Normal file (2 lines changed)

File diff suppressed because one or more lines are too long

dist/filer-perf.js vendored Normal file (10076 lines changed)

File diff suppressed because it is too large

dist/filer-test.js vendored Normal file (20701 lines changed)

File diff suppressed because it is too large

dist/filer.js vendored (20321 lines changed)

File diff suppressed because it is too large

dist/filer.js.map vendored (1 line changed)

File diff suppressed because one or more lines are too long

dist/filer.map vendored (1 line changed)

File diff suppressed because one or more lines are too long

dist/filer.min.js vendored (253 lines changed)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/filer.min.map vendored (1 line changed)

File diff suppressed because one or more lines are too long

dist/path.js vendored Normal file (243 lines changed)

@ -0,0 +1,243 @@
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.Path = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// Based on https://github.com/joyent/node/blob/41e53e557992a7d552a8e23de035f9463da25c99/lib/path.js
// resolves . and .. elements in a path array with directory names there
// must be no slashes, empty elements, or device names (c:\) in the array
// (so also no leading and trailing slashes - it does not distinguish
// relative and absolute paths)
function normalizeArray(parts, allowAboveRoot) {
// if the path tries to go above the root, `up` ends up > 0
var up = 0;
for (var i = parts.length - 1; i >= 0; i--) {
var last = parts[i];
if (last === '.') {
parts.splice(i, 1);
} else if (last === '..') {
parts.splice(i, 1);
up++;
} else if (up) {
parts.splice(i, 1);
up--;
}
}
// if the path is allowed to go above the root, restore leading ..s
if (allowAboveRoot) {
for (; up--; up) {
parts.unshift('..');
}
}
return parts;
}
// Split a filename into [root, dir, basename, ext], unix version
// 'root' is just a slash, or nothing.
var splitPathRe =
/^(\/?)([\s\S]+\/(?!$)|\/)?((?:\.{1,2}$|[\s\S]+?)?(\.[^.\/]*)?)$/;
var splitPath = function(filename) {
var result = splitPathRe.exec(filename);
return [result[1] || '', result[2] || '', result[3] || '', result[4] || ''];
};
// path.resolve([from ...], to)
function resolve() {
var resolvedPath = '',
resolvedAbsolute = false;
for (var i = arguments.length - 1; i >= -1 && !resolvedAbsolute; i--) {
// XXXfiler: we don't have process.cwd() so we use '/' as a fallback
var path = (i >= 0) ? arguments[i] : '/';
// Skip empty and invalid entries
if (typeof path !== 'string' || !path) {
continue;
}
resolvedPath = path + '/' + resolvedPath;
resolvedAbsolute = path.charAt(0) === '/';
}
// At this point the path should be resolved to a full absolute path, but
// handle relative paths to be safe (might happen when process.cwd() fails)
// Normalize the path
resolvedPath = normalizeArray(resolvedPath.split('/').filter(function(p) {
return !!p;
}), !resolvedAbsolute).join('/');
return ((resolvedAbsolute ? '/' : '') + resolvedPath) || '.';
}
// path.normalize(path)
function normalize(path) {
var isAbsolute = path.charAt(0) === '/',
trailingSlash = path.substr(-1) === '/';
// Normalize the path
path = normalizeArray(path.split('/').filter(function(p) {
return !!p;
}), !isAbsolute).join('/');
if (!path && !isAbsolute) {
path = '.';
}
/*
if (path && trailingSlash) {
path += '/';
}
*/
return (isAbsolute ? '/' : '') + path;
}
function join() {
var paths = Array.prototype.slice.call(arguments, 0);
return normalize(paths.filter(function(p, index) {
return p && typeof p === 'string';
}).join('/'));
}
// path.relative(from, to)
function relative(from, to) {
from = resolve(from).substr(1);
to = resolve(to).substr(1);
function trim(arr) {
var start = 0;
for (; start < arr.length; start++) {
if (arr[start] !== '') break;
}
var end = arr.length - 1;
for (; end >= 0; end--) {
if (arr[end] !== '') break;
}
if (start > end) return [];
return arr.slice(start, end - start + 1);
}
var fromParts = trim(from.split('/'));
var toParts = trim(to.split('/'));
var length = Math.min(fromParts.length, toParts.length);
var samePartsLength = length;
for (var i = 0; i < length; i++) {
if (fromParts[i] !== toParts[i]) {
samePartsLength = i;
break;
}
}
var outputParts = [];
for (var i = samePartsLength; i < fromParts.length; i++) {
outputParts.push('..');
}
outputParts = outputParts.concat(toParts.slice(samePartsLength));
return outputParts.join('/');
}
function dirname(path) {
var result = splitPath(path),
root = result[0],
dir = result[1];
if (!root && !dir) {
// No dirname whatsoever
return '.';
}
if (dir) {
// It has a dirname, strip trailing slash
dir = dir.substr(0, dir.length - 1);
}
return root + dir;
}
function basename(path, ext) {
var f = splitPath(path)[2];
// TODO: make this comparison case-insensitive on windows?
if (ext && f.substr(-1 * ext.length) === ext) {
f = f.substr(0, f.length - ext.length);
}
// XXXfiler: node.js just does `return f`
return f === "" ? "/" : f;
}
function extname(path) {
return splitPath(path)[3];
}
function isAbsolute(path) {
if(path.charAt(0) === '/') {
return true;
}
return false;
}
function isNull(path) {
if (('' + path).indexOf('\u0000') !== -1) {
return true;
}
return false;
}
// Make sure we don't double-add a trailing slash (e.g., '/' -> '//')
function addTrailing(path) {
return path.replace(/\/*$/, '/');
}
// Deal with multiple slashes at the end, one, or none
// and make sure we don't return the empty string.
function removeTrailing(path) {
path = path.replace(/\/*$/, '');
return path === '' ? '/' : path;
}
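// Illustrative examples (editor's sketch, not part of the original source):
// the two Filer-only trailing-slash helpers above in action.
//   addTrailing('/tmp');        // => '/tmp/'
//   addTrailing('/tmp///');     // => '/tmp/'
//   removeTrailing('/tmp///');  // => '/tmp'
//   removeTrailing('/');        // => '/' (never returns the empty string)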
// XXXfiler: we don't support path.exists() or path.existsSync(), which
// are deprecated, and need a FileSystem instance to work. Use fs.stat().
module.exports = {
normalize: normalize,
resolve: resolve,
join: join,
relative: relative,
sep: '/',
delimiter: ':',
dirname: dirname,
basename: basename,
extname: extname,
isAbsolute: isAbsolute,
isNull: isNull,
// Non-node but useful...
addTrailing: addTrailing,
removeTrailing: removeTrailing
};
},{}]},{},[1])(1)
});

2
dist/path.min.js vendored Normal file
View File

@ -0,0 +1,2 @@
/*! filer 0.0.44 2017-05-25 */
!function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{var b;b="undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this,b.Path=a()}}(function(){return function a(b,c,d){function e(g,h){if(!c[g]){if(!b[g]){var i="function"==typeof require&&require;if(!h&&i)return i(g,!0);if(f)return f(g,!0);var j=new Error("Cannot find module '"+g+"'");throw j.code="MODULE_NOT_FOUND",j}var k=c[g]={exports:{}};b[g][0].call(k.exports,function(a){var c=b[g][1][a];return e(c?c:a)},k,k.exports,a,b,c,d)}return c[g].exports}for(var f="function"==typeof require&&require,g=0;g<d.length;g++)e(d[g]);return e}({1:[function(a,b,c){function d(a,b){for(var c=0,d=a.length-1;d>=0;d--){var e=a[d];"."===e?a.splice(d,1):".."===e?(a.splice(d,1),c++):c&&(a.splice(d,1),c--)}if(b)for(;c--;c)a.unshift("..");return a}function e(){for(var a="",b=!1,c=arguments.length-1;c>=-1&&!b;c--){var e=c>=0?arguments[c]:"/";"string"==typeof e&&e&&(a=e+"/"+a,b="/"===e.charAt(0))}return a=d(a.split("/").filter(function(a){return!!a}),!b).join("/"),(b?"/":"")+a||"."}function f(a){var b="/"===a.charAt(0);"/"===a.substr(-1);return a=d(a.split("/").filter(function(a){return!!a}),!b).join("/"),a||b||(a="."),(b?"/":"")+a}function g(){var a=Array.prototype.slice.call(arguments,0);return f(a.filter(function(a,b){return a&&"string"==typeof a}).join("/"))}function h(a,b){function c(a){for(var b=0;b<a.length&&""===a[b];b++);for(var c=a.length-1;c>=0&&""===a[c];c--);return b>c?[]:a.slice(b,c-b+1)}a=e(a).substr(1),b=e(b).substr(1);for(var d=c(a.split("/")),f=c(b.split("/")),g=Math.min(d.length,f.length),h=g,i=0;g>i;i++)if(d[i]!==f[i]){h=i;break}for(var j=[],i=h;i<d.length;i++)j.push("..");return j=j.concat(f.slice(h)),j.join("/")}function i(a){var b=q(a),c=b[0],d=b[1];return c||d?(d&&(d=d.substr(0,d.length-1)),c+d):"."}function j(a,b){var c=q(a)[2];return b&&c.substr(-1*b.length)===b&&(c=c.substr(0,c.length-b.length)),""===c?"/":c}function k(a){return q(a)[3]}function l(a){return"/"===a.charAt(0)?!0:!1}function m(a){return-1!==(""+a).indexOf("\x00")?!0:!1}function n(a){return a.replace(/\/*$/,"/")}function o(a){return a=a.replace(/\/*$/,""),""===a?"/":a}var p=/^(\/?)([\s\S]+\/(?!$)|\/)?((?:\.{1,2}$|[\s\S]+?)?(\.[^.\/]*)?)$/,q=function(a){var b=p.exec(a);return[b[1]||"",b[2]||"",b[3]||"",b[4]||""]};b.exports={normalize:f,resolve:e,join:g,relative:h,sep:"/",delimiter:":",dirname:i,basename:j,extname:k,isAbsolute:l,isNull:m,addTrailing:n,removeTrailing:o}},{}]},{},[1])(1)});

View File

@ -1,9 +1,14 @@
###
# Dev ENVIRONMENT file
#
# Copy to .env to use defaults when releasing via `npm release`
# Copy to .env to use defaults
###
# GitHub Personal Access Token (to push releases)
# https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/
GITHUB_TOKEN=
# GIT (upstream) url to publish to
export FILER_UPSTREAM_URI="git@github.com:js-platform/filer.git"
# GIT (upstream) branch to publish to
export FILER_UPSTREAM_BRANCH="develop"
# Remote name for upstream repo
export FILER_UPSTREAM_REMOTE_NAME="origin"

385
gruntfile.js Normal file
View File

@ -0,0 +1,385 @@
var semver = require('semver'),
fs = require('fs'),
currentVersion = JSON.parse(fs.readFileSync('./package.json', 'utf8')).version,
env = require('./config/environment');
// Globals
var PROMPT_CONFIRM_CONFIG = 'confirmation',
GIT_BRANCH = env.get('FILER_UPSTREAM_BRANCH'),
GIT_REMOTE = env.get('FILER_UPSTREAM_REMOTE_NAME'),
GIT_FULL_REMOTE = env.get('FILER_UPSTREAM_URI') + ' ' + GIT_BRANCH;
module.exports = function(grunt) {
// Project configuration.
grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
clean: ['dist/filer-test.js', 'dist/filer-issue225.js', 'dist/filer-perf.js'],
uglify: {
options: {
banner: '/*! <%= pkg.name %> <%= pkg.version %> <%= grunt.template.today("yyyy-mm-dd") %> */\n'
},
develop: {
src: 'dist/filer.js',
dest: 'dist/filer.min.js'
},
path: {
src: 'dist/path.js',
dest: 'dist/path.min.js'
},
buffer: {
src: 'dist/buffer.js',
dest: 'dist/buffer.min.js'
}
},
jshint: {
// Don't bother with src/path.js
all: [
'gruntfile.js',
'config/environment.js',
'src/constants.js',
'src/errors.js',
'src/fs.js',
'src/index.js',
'src/shared.js',
'src/shell.js',
'src/fswatcher.js',
'src/environment.js',
'src/providers/**/*.js',
'src/adapters/**/*.js',
'src/directory-entry.js',
'src/open-file-description.js',
'src/super-node.js',
'src/node.js',
'src/stats.js',
'src/filesystem/**/*.js'
]
},
browserify: {
filerDist: {
src: "./src/index.js",
dest: "./dist/filer.js",
options: {
browserifyOptions: {
commondir: false,
builtins: ["buffer", "path", "url", "punycode", "querystring"],
insertGlobalVars: {
// This ensures that process won't be defined, since
// browserify will do so automatically if any globals
// are requested by us or detected by browserify.
process: function() {
return undefined;
}
},
standalone: 'Filer'
},
exclude: ["./node_modules/request/index.js"]
}
},
filerPerf: {
src: "./perf/index.js",
dest: "./dist/filer-perf.js",
options: {
browserifyOptions: {
commondir: false,
builtins: ["buffer", "path", "url", "punycode", "querystring"],
insertGlobalVars: {
// This ensures that process won't be defined, since
// browserify will do so automatically if any globals
// are requested by us or detected by browserify.
process: function() {
return undefined;
}
},
standalone: 'Filer'
}
}
},
filerTest: {
src: "./tests/index.js",
dest: "./dist/filer-test.js",
options: {
browserifyOptions: {
commondir: false,
builtins: ["buffer", "path", "url", "punycode", "querystring"],
insertGlobalVars: {
// This ensures that process won't be defined, since
// browserify will do so automatically if any globals
// are requested by us or detected by browserify.
process: function() {
return undefined;
}
}
}
}
},
// See tests/bugs/issue225.js
filerIssue225: {
src: "./src/index.js",
dest: "./dist/filer-issue225.js",
options: {
browserifyOptions: {
commondir: false,
builtins: ["buffer", "path", "url", "punycode", "querystring"],
insertGlobalVars: {
// This ensures that process won't be defined, since
// browserify will do so automatically if any globals
// are requested by us or detected by browserify.
process: function() {
return undefined;
}
},
standalone: 'Filer'
}
}
},
// For low-cost access to filer's `Path` and `buffer` modules
filerPath: {
src: "./src/path.js",
dest: "./dist/path.js",
options: {
browserifyOptions: {
standalone: 'Path'
}
}
},
filerBuffer: {
src: "./src/buffer.js",
dest: "./dist/buffer.js",
options: {
browserifyOptions: {
standalone: 'FilerBuffer'
}
}
}
},
shell: {
mocha: {
// Run all tests (e.g., tests require()'ed in tests/index.js) and also tests/bugs/issue225.js
// separately, since it can't be included in a browserify build.
command: '"./node_modules/.bin/mocha" --reporter list tests/index.js && "./node_modules/.bin/mocha" --reporter list tests/bugs/issue225.js'
}
},
bump: {
options: {
files: ['package.json', 'bower.json'],
commit: true,
commitMessage: 'v%VERSION%',
commitFiles: [
'package.json',
'bower.json',
'./dist/filer.js',
'./dist/filer.min.js',
'./dist/buffer.js',
'./dist/buffer.min.js',
'./dist/path.js',
'./dist/path.min.js'
],
createTag: true,
tagName: 'v%VERSION%',
tagMessage: 'v%VERSION%',
push: true,
pushTo: GIT_FULL_REMOTE
}
},
'npm-checkBranch': {
options: {
branch: GIT_BRANCH
}
},
'npm-publish': {
options: {
abortIfDirty: false
}
},
prompt: {
confirm: {
options: {
questions: [
{
config: PROMPT_CONFIRM_CONFIG,
type: 'confirm',
message: 'Bump version from ' + (currentVersion).cyan +
' to ' + semver.inc(currentVersion, "patch").yellow + '?',
default: false
}
],
then: function(results) {
if (!results[PROMPT_CONFIRM_CONFIG]) {
return grunt.fatal('User aborted...');
}
}
}
}
},
gitcheckout: {
publish: {
options: {
branch: 'gh-pages',
force: true
}
},
revert: {
options: {
branch: GIT_BRANCH
}
}
},
gitpush: {
publish: {
options: {
remote: GIT_REMOTE,
branch: 'gh-pages',
force: true
},
}
},
gitcommit: {
publish: {
options: {
noStatus: true
}
}
},
gitadd: {
publish: {
files: {
src: ['./dist/filer-test.js', './dist/filer-perf.js']
}
}
},
gitstash: {
publish: {
},
pop: {
options: {
command: "pop"
}
}
},
gitrm: {
publish: {
options: {
force: true
},
files: {
src: ['./dist/filer-test.js', './dist/filer-perf.js']
}
}
},
connect: {
serverForBrowser: {
options: {
port: 1234,
base: './',
keepalive: true
}
}
},
usebanner: {
publish: {
options: {
position: "top"
},
files: {
src: ['./dist/filer-test.js', './dist/filer-perf.js']
}
}
}
});
grunt.loadNpmTasks('grunt-contrib-clean');
grunt.loadNpmTasks('grunt-contrib-uglify');
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-contrib-connect');
grunt.loadNpmTasks('grunt-bump');
grunt.loadNpmTasks('grunt-npm');
grunt.loadNpmTasks('grunt-git');
grunt.loadNpmTasks('grunt-prompt');
grunt.loadNpmTasks('grunt-shell');
grunt.loadNpmTasks('grunt-contrib-connect');
grunt.loadNpmTasks('grunt-browserify');
grunt.loadNpmTasks('grunt-banner');
grunt.registerTask('develop', [
'browserify:filerDist',
'browserify:filerPath',
'browserify:filerBuffer',
'uglify:develop',
'uglify:path',
'uglify:buffer'
]);
grunt.registerTask('build-tests', ['clean', 'browserify:filerTest', 'browserify:filerPerf', 'browserify:filerIssue225']);
grunt.registerTask('release', ['test', 'develop']);
grunt.registerTask('publish', 'Publish filer as a new version to NPM, bower and github.', function(patchLevel) {
var allLevels = ['patch', 'minor', 'major'];
// No level specified defaults to 'patch'
patchLevel = (patchLevel || 'patch').toLowerCase();
// Fail out if the patch level isn't recognized
if (allLevels.filter(function(el) { return el == patchLevel; }).length === 0) {
return grunt.fatal('Patch level not recognized! "Patch", "minor" or "major" only.');
}
// Set prompt message
var promptOpts = grunt.config('prompt.confirm.options');
promptOpts.questions[0].message = 'Bump version from ' + (currentVersion).cyan +
' to ' + semver.inc(currentVersion, patchLevel).yellow + '?';
grunt.config('prompt.confirm.options', promptOpts);
// Store the new version in the gh-pages commit message
var ghPagesMessage = 'Tests for Filer v' + semver.inc(currentVersion, patchLevel);
grunt.config('gitcommit.publish.options.message', ghPagesMessage);
// Store the new version as a banner in the test file
// NOTE: This is a hack intended to ensure that this build process
// succeeds even if no changes were made to the tests
// before publishing a new version. Otherwise, the automatic
// commit + push to github pages would break a normal build
var bannerMsg = "/* Test file for filerjs v" + semver.inc(currentVersion, patchLevel) + "*/";
grunt.config('usebanner.publish.options.banner', bannerMsg);
grunt.task.run([
'prompt:confirm',
'checkBranch',
'release',
'bump:' + patchLevel,
'build-tests',
'usebanner:publish',
'gitadd:publish',
'gitstash:publish',
'gitcheckout:publish',
'gitrm:publish',
'gitstash:pop',
'gitcommit:publish',
'gitpush:publish',
'gitcheckout:revert',
'npm-publish'
]);
});
grunt.registerTask('test-node', ['jshint', 'browserify:filerIssue225', 'shell:mocha']);
grunt.registerTask('test-browser', ['jshint', 'build-tests', 'connect:serverForBrowser']);
grunt.registerTask('test', ['test-node']);
grunt.registerTask('default', ['test']);
};
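// Illustrative invocations (editor's sketch, not part of the original gruntfile),
// assuming grunt-cli is installed globally as described in CONTRIBUTING:
//   grunt develop          // build dist/filer.js, dist/path.js, dist/buffer.js plus minified copies
//   grunt test             // alias for test-node (jshint + mocha)
//   grunt publish:minor    // bump minor version, rebuild, tag and publish; defaults to :patch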

View File

@ -1,28 +0,0 @@
module.exports = function(config) {
config.set({
singleRun: true,
basePath: '',
files: [
'node_modules/regenerator-runtime/runtime.js',
'tests/dist/index.js'
],
frameworks: ['mocha', 'chai'],
reporters: ['mocha', 'summary'],
client: {
captureConsole: true,
mocha: {
ui: 'bdd',
timeout: 5000,
slow: 250
}
},
summaryReporter: {
// 'failed', 'skipped' or 'all'
show: 'failed',
// Limit the spec label to this length
specLength: 50,
// Show an 'all' column as a summary
overviewColumn: true
}
});
};

97
lib/nodash.js Normal file
View File

@ -0,0 +1,97 @@
// Cherry-picked bits of underscore.js, lodash.js
/**
* Lo-Dash 2.4.0 <http://lodash.com/>
* Copyright 2012-2013 The Dojo Foundation <http://dojofoundation.org/>
* Based on Underscore.js 1.5.2 <http://underscorejs.org/LICENSE>
* Copyright 2009-2013 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
* Available under MIT license <http://lodash.com/license>
*/
var ArrayProto = Array.prototype;
var nativeForEach = ArrayProto.forEach;
var nativeIndexOf = ArrayProto.indexOf;
var nativeSome = ArrayProto.some;
var ObjProto = Object.prototype;
var hasOwnProperty = ObjProto.hasOwnProperty;
var nativeKeys = Object.keys;
var breaker = {};
function has(obj, key) {
return hasOwnProperty.call(obj, key);
}
var keys = nativeKeys || function(obj) {
if (obj !== Object(obj)) throw new TypeError('Invalid object');
var keys = [];
for (var key in obj) if (has(obj, key)) keys.push(key);
return keys;
};
function size(obj) {
if (obj == null) return 0;
return (obj.length === +obj.length) ? obj.length : keys(obj).length;
}
function identity(value) {
return value;
}
function each(obj, iterator, context) {
var i, length;
if (obj == null) return;
if (nativeForEach && obj.forEach === nativeForEach) {
obj.forEach(iterator, context);
} else if (obj.length === +obj.length) {
for (i = 0, length = obj.length; i < length; i++) {
if (iterator.call(context, obj[i], i, obj) === breaker) return;
}
} else {
// Use a distinct local name so the module-level keys() helper isn't shadowed
// (a `var keys = keys(obj)` declaration hoists and would call undefined).
var objKeys = keys(obj);
for (i = 0, length = objKeys.length; i < length; i++) {
if (iterator.call(context, obj[objKeys[i]], objKeys[i], obj) === breaker) return;
}

}
};
function any(obj, iterator, context) {
iterator || (iterator = identity);
var result = false;
if (obj == null) return result;
if (nativeSome && obj.some === nativeSome) return obj.some(iterator, context);
each(obj, function(value, index, list) {
if (result || (result = iterator.call(context, value, index, list))) return breaker;
});
return !!result;
};
function contains(obj, target) {
if (obj == null) return false;
if (nativeIndexOf && obj.indexOf === nativeIndexOf) return obj.indexOf(target) != -1;
return any(obj, function(value) {
return value === target;
});
};
function Wrapped(value) {
this.value = value;
}
Wrapped.prototype.has = function(key) {
return has(this.value, key);
};
Wrapped.prototype.contains = function(target) {
return contains(this.value, target);
};
Wrapped.prototype.size = function() {
return size(this.value);
};
function nodash(value) {
// don't wrap if already wrapped, even if wrapped by a different `lodash` constructor
return (value && typeof value == 'object' && !Array.isArray(value) && hasOwnProperty.call(value, '__wrapped__'))
? value
: new Wrapped(value);
}
module.exports = nodash;
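// Illustrative usage (editor's sketch, not part of the original file):
//   var _ = require('./lib/nodash.js');
//   _(['a', 'b', 'c']).contains('b');  // => true
//   _({ x: 1, y: 2 }).size();          // => 2
//   _({ x: 1 }).has('x');              // => true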

13903
package-lock.json generated

File diff suppressed because it is too large

View File

@ -5,91 +5,50 @@
"fs",
"node",
"file",
"filesystem",
"system",
"browser",
"indexeddb",
"idb"
"idb",
"websql"
],
"version": "1.4.1",
"version": "0.0.44",
"author": "Alan K <ack@modeswitch.org> (http://blog.modeswitch.org)",
"homepage": "http://filerjs.github.io/filer",
"bugs": "https://github.com/filerjs/filer/issues",
"license": "BSD-2-Clause",
"license": "BSD",
"scripts": {
"eslint": "npm run lint",
"eslint:fix": "npm run lint:fix",
"lint": "eslint src tests",
"lint:fix": "eslint --fix src tests",
"test:node": "mocha --timeout 5000 tests",
"pretest:node-debug": "echo \"Open Chrome to chrome://inspect to debug tests...\"",
"test:node-debug": "mocha --timeout 5000 --inspect-brk tests",
"test:manual": "parcel tests/index.html --out-dir tests/dist",
"test:migrations": "mocha tests/filesystems/migrations",
"pretest": "npm run lint",
"test": "npm run karma-mocha",
"posttest": "npm run test:migrations",
"prebuild": "parcel build --global Filer src/index.js --no-minify --out-file filer.js",
"build": "parcel build --global Filer src/index.js --out-file filer.min.js --detailed-report",
"build-tests": "parcel build tests/index.js --no-source-maps --out-dir tests/dist",
"prekarma-mocha-firefox": "npm run build-tests",
"karma-mocha-firefox": "karma start karma.conf.js --browsers FirefoxHeadless",
"prekarma-mocha-chrome": "npm run build-tests",
"karma-mocha-chrome": "karma start karma.conf.js --browsers ChromeHeadless",
"prekarma-mocha": "npm run build-tests",
"karma-mocha": "karma start karma.conf.js --browsers ChromeHeadless,FirefoxHeadless",
"coverage": "nyc mocha tests/index.js",
"release": "run.env release-it"
"test": "grunt test"
},
"repository": {
"type": "git",
"url": "https://github.com/filerjs/filer.git"
},
"dependencies": {
"es6-promisify": "^7.0.0",
"minimatch": "^3.0.4",
"schema-utils": "^3.1.1"
"bower": "~1.3.8",
"base64-arraybuffer": "^0.1.2",
"minimatch": "^1.0.0"
},
"devDependencies": {
"regenerator-runtime": "^0.13.9",
"chai": "^4.3.4",
"chai-datetime": "^1.8.0",
"eslint": "^7.32.0",
"fake-indexeddb": "^3.1.7",
"karma": "^6.3.8",
"karma-chai": "^0.1.0",
"karma-chrome-launcher": "^3.1.0",
"karma-firefox-launcher": "^2.1.2",
"karma-mocha": "^2.0.1",
"karma-mocha-reporter": "^2.2.5",
"karma-summary-reporter": "^3.0.0",
"meow": "^10.0.1",
"mocha": "^9.1.3",
"nyc": "^15.1.0",
"parcel-bundler": "^1.12.5",
"pretty-bytes": "^5.6.0",
"release-it": "^14.11.6",
"run.env": "^1.1.0",
"unused-filename": "^3.0.1",
"walk": "^2.3.15"
"chai": "~1.9.1",
"grunt": "~0.4.0",
"grunt-banner": "^0.2.3",
"grunt-browserify": "^3.8.0",
"grunt-bump": "0.0.13",
"grunt-contrib-clean": "~0.4.0",
"grunt-contrib-compress": "~0.4.1",
"grunt-contrib-concat": "~0.1.3",
"grunt-contrib-connect": "^0.10.1",
"grunt-contrib-jshint": "~0.7.1",
"grunt-contrib-uglify": "^0.9.1",
"grunt-contrib-watch": "~0.3.1",
"grunt-git": "0.3.3",
"grunt-npm": "git://github.com/sedge/grunt-npm.git#branchcheck",
"grunt-prompt": "^1.1.0",
"grunt-shell": "~0.7.0",
"habitat": "^1.1.0",
"mocha": "~1.18.2",
"requirejs": "^2.1.14",
"semver": "^2.3.0"
},
"main": "./src/index.js",
"browser": "./dist/filer.min.js",
"files": [
"src",
"lib",
"dist",
"shims",
"webpack"
],
"nyc": {
"exclude": [
"tests/**/*.js",
"lib/**/*.js",
"src/providers/**/*.js"
],
"reporter": [
"lcov",
"text"
]
}
"main": "./src/index.js"
}

View File

@ -1,3 +1,4 @@
var Filer = require('..');
var util = require('../tests/lib/test-utils.js');
function setImmediate(cb) {

View File

@ -1,65 +0,0 @@
const { FileSystem } = require('../src/index');
let Provider;
try {
Provider = require('fsProvider');
}
catch (err) {
Provider = require('./providers/default');
}
const provider = new Provider();
let onFsReady;
let onFsError;
let fsReady = new Promise((resolve, reject) => {
onFsReady = resolve;
onFsError = reject;
});
var fsInstance = new FileSystem({ provider }, (err) => {
if (err) {
onFsError(err);
} else {
onFsReady(true);
}
});
function proxyHasProp(target, prop) {
return prop in target;
}
const fsPromises = new Proxy(fsInstance.promises, {
get(target, prop) {
if (!proxyHasProp(target, prop)) {
return;
}
return async (...args) => {
await fsReady;
return await target[prop](...args);
};
},
});
const fs = new Proxy(fsInstance, {
get(target, prop) {
if (!proxyHasProp(target, prop)) {
return;
}
if (prop === 'promises') {
return fsPromises;
}
return (...args) => {
(async () => {
await fsReady;
target[prop](...args);
})();
};
},
});
module.exports = fs;

View File

@ -1,3 +0,0 @@
const { path } = require('../src/index');
module.exports = path;

View File

@ -1,2 +0,0 @@
const { Default } = require('../../src/providers/index');
module.exports = Default;

View File

@ -1,2 +0,0 @@
const IndexedDB = require('../../src/providers/indexeddb');
module.exports = IndexedDB;

View File

@ -1,2 +0,0 @@
const Memory = require('../../src/providers/memory');
module.exports = Memory;

23
src/buffer.js Normal file
View File

@ -0,0 +1,23 @@
function FilerBuffer (subject, encoding, nonZero) {
// Automatically turn ArrayBuffer into Uint8Array so that underlying
// Buffer code doesn't just throw away and ignore ArrayBuffer data.
if (subject instanceof ArrayBuffer) {
subject = new Uint8Array(subject);
}
return new Buffer(subject, encoding, nonZero);
};
// Inherit prototype from Buffer
FilerBuffer.prototype = Object.create(Buffer.prototype);
FilerBuffer.prototype.constructor = FilerBuffer;
// Also copy static methods onto FilerBuffer ctor
Object.keys(Buffer).forEach(function (p) {
if (Buffer.hasOwnProperty(p)) {
FilerBuffer[p] = Buffer[p];
}
});
module.exports = FilerBuffer;
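// Illustrative usage (editor's sketch, not part of the original file):
// FilerBuffer accepts an ArrayBuffer directly, unlike the underlying Buffer ctor.
//   var FilerBuffer = require('./src/buffer.js');
//   var ab = new Uint8Array([104, 105]).buffer;   // ArrayBuffer containing 'hi'
//   var buf = new FilerBuffer(ab);
//   buf.toString('utf8');                         // => 'hi'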

View File

@ -15,20 +15,14 @@ module.exports = {
IDB_RO: 'readonly',
IDB_RW: 'readwrite',
WSQL_VERSION: '1',
WSQL_VERSION: "1",
WSQL_SIZE: 5 * 1024 * 1024,
WSQL_DESC: 'FileSystem Storage',
WSQL_DESC: "FileSystem Storage",
NODE_TYPE_FILE: 'FILE',
NODE_TYPE_DIRECTORY: 'DIRECTORY',
NODE_TYPE_SYMBOLIC_LINK: 'SYMLINK',
NODE_TYPE_META: 'META',
DEFAULT_DIR_PERMISSIONS: 0x1ED, // 755
DEFAULT_FILE_PERMISSIONS: 0x1A4, // 644
FULL_READ_WRITE_EXEC_PERMISSIONS: 0x1FF, // 777
READ_WRITE_PERMISSIONS: 0x1B6, /// 666
MODE_FILE: 'FILE',
MODE_DIRECTORY: 'DIRECTORY',
MODE_SYMBOLIC_LINK: 'SYMLINK',
MODE_META: 'META',
SYMLOOP_MAX: 10,
@ -82,49 +76,5 @@ module.exports = {
ENVIRONMENT: {
TMP: '/tmp',
PATH: ''
},
// Duplicate Node's fs.constants
fsConstants: {
O_RDONLY: 0,
O_WRONLY: 1,
O_RDWR: 2,
S_IFMT: 61440,
S_IFREG: 32768,
S_IFDIR: 16384,
S_IFCHR: 8192,
S_IFBLK: 24576,
S_IFIFO: 4096,
S_IFLNK: 40960,
S_IFSOCK: 49152,
O_CREAT: 512,
O_EXCL: 2048,
O_NOCTTY: 131072,
O_TRUNC: 1024,
O_APPEND: 8,
O_DIRECTORY: 1048576,
O_NOFOLLOW: 256,
O_SYNC: 128,
O_DSYNC: 4194304,
O_SYMLINK: 2097152,
O_NONBLOCK: 4,
S_IRWXU: 448,
S_IRUSR: 256,
S_IWUSR: 128,
S_IXUSR: 64,
S_IRWXG: 56,
S_IRGRP: 32,
S_IWGRP: 16,
S_IXGRP: 8,
S_IRWXO: 7,
S_IROTH: 4,
S_IWOTH: 2,
S_IXOTH: 1,
F_OK: 0,
R_OK: 4,
W_OK: 2,
X_OK: 1,
UV_FS_COPYFILE_EXCL: 1,
COPYFILE_EXCL: 1
}
};
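// Illustrative usage (editor's sketch, not part of the original file): these
// constants are re-exposed as fs.constants in src/filesystem/interface.js, so
// callers can mirror node.js access checks, e.g.:
//   fs.access('/private.txt', fs.constants.R_OK | fs.constants.W_OK, function(err) {
//     console.log(err ? 'no access' : 'can read/write');
//   });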

View File

@ -1,6 +1,6 @@
var NODE_TYPE_FILE = require('./constants.js').NODE_TYPE_FILE;
var MODE_FILE = require('./constants.js').MODE_FILE;
module.exports = function DirectoryEntry(id, type) {
this.id = id;
this.type = type || NODE_TYPE_FILE;
this.type = type || MODE_FILE;
};

View File

@ -1,12 +0,0 @@
'use strict';
const Stats = require('./stats.js');
function Dirent(path, fileNode, devName) {
this.constructor = Dirent;
Stats.call(this, path, fileNode, devName);
}
Dirent.prototype = Stats.prototype;
module.exports = Dirent;

13
src/encoding.js Normal file
View File

@ -0,0 +1,13 @@
// Adapt encodings to work with Buffer or Uint8Array, they expect the latter
function decode(buf) {
return buf.toString('utf8');
}
function encode(string) {
return new Buffer(string, 'utf8');
}
module.exports = {
encode: encode,
decode: decode
};
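// Illustrative usage (editor's sketch, not part of the original file):
// encode()/decode() are a simple UTF-8 round trip between strings and Buffers.
//   var enc = require('./src/encoding.js');
//   var buf = enc.encode('héllo');   // Buffer of UTF-8 bytes
//   enc.decode(buf);                 // => 'héllo'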

View File

@ -7,7 +7,7 @@ var errors = {};
//'0:OK:success',
//'1:EOF:end of file',
//'2:EADDRINFO:getaddrinfo error',
'3:EACCES:permission denied',
//'3:EACCES:permission denied',
//'4:EAGAIN:resource temporarily unavailable',
//'5:EADDRINUSE:address already in use',
//'6:EADDRNOTAVAIL:address not available',

File diff suppressed because it is too large Load Diff

View File

@ -1,39 +1,35 @@
'use strict';
var _ = require('../../lib/nodash.js');
const { promisify } = require('es6-promisify');
var isNullPath = require('../path.js').isNull;
var nop = require('../shared.js').nop;
const Path = require('../path.js');
var Constants = require('../constants.js');
var FILE_SYSTEM_NAME = Constants.FILE_SYSTEM_NAME;
var FS_FORMAT = Constants.FS_FORMAT;
var FS_READY = Constants.FS_READY;
var FS_PENDING = Constants.FS_PENDING;
var FS_ERROR = Constants.FS_ERROR;
var FS_NODUPEIDCHECK = Constants.FS_NODUPEIDCHECK;
const providers = require('../providers/index.js');
var providers = require('../providers/index.js');
const Shell = require('../shell/shell.js');
const Intercom = require('../../lib/intercom.js');
const FSWatcher = require('../fs-watcher.js');
const Errors = require('../errors.js');
const {
nop,
guid: defaultGuidFn
} = require('../shared.js');
var Shell = require('../shell/shell.js');
var Intercom = require('../../lib/intercom.js');
var FSWatcher = require('../fs-watcher.js');
var Errors = require('../errors.js');
var defaultGuidFn = require('../shared.js').guid;
const {
fsConstants,
FILE_SYSTEM_NAME,
FS_FORMAT,
FS_READY,
FS_PENDING,
FS_ERROR,
FS_NODUPEIDCHECK,
STDIN,
STDOUT,
STDERR
} = require('../constants.js');
var STDIN = Constants.STDIN;
var STDOUT = Constants.STDOUT;
var STDERR = Constants.STDERR;
var FIRST_DESCRIPTOR = Constants.FIRST_DESCRIPTOR;
// The core fs operations live on impl
const impl = require('./implementation.js');
var impl = require('./implementation.js');
// node.js supports a calling pattern that leaves off a callback.
function maybeCallback(callback) {
if (typeof callback === 'function') {
if(typeof callback === "function") {
return callback;
}
return function(err) {
@ -46,65 +42,9 @@ function maybeCallback(callback) {
// Default callback that logs an error if passed in
function defaultCallback(err) {
if(err) {
/* eslint no-console: 0 */
console.error('Filer error: ', err);
}
}
// Get a path (String) from a file:// URL. Support URL() like objects
// https://github.com/nodejs/node/blob/968e901aff38a343b1de4addebf79fd8fa991c59/lib/internal/url.js#L1381
function toPathIfFileURL(fileURLOrPath) {
if (!(fileURLOrPath &&
fileURLOrPath.protocol &&
fileURLOrPath.pathname)) {
return fileURLOrPath;
}
if (fileURLOrPath.protocol !== 'file:') {
throw new Errors.EINVAL('only file: URLs are supported for paths', fileURLOrPath);
}
const pathname = fileURLOrPath.pathname;
for (let n = 0; n < pathname.length; n++) {
if (pathname[n] === '%') {
const third = pathname.codePointAt(n + 2) | 0x20;
if (pathname[n + 1] === '2' && third === 102) {
throw new Errors.EINVAL('file: URLs must not include encoded / characters', fileURLOrPath);
}
}
}
return decodeURIComponent(pathname);
}
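// Illustrative examples (editor's sketch, not part of the original file),
// assuming a WHATWG URL global is available:
//   toPathIfFileURL(new URL('file:///docs/notes.txt'));  // => '/docs/notes.txt'
//   toPathIfFileURL('/docs/notes.txt');                  // => '/docs/notes.txt' (passed through)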
// Allow Buffers for paths. Assumes we want UTF8.
function toPathIfBuffer(bufferOrPath) {
return Buffer.isBuffer(bufferOrPath) ? bufferOrPath.toString() : bufferOrPath;
}
function validatePath(path, allowRelative) {
if (!path) {
return new Errors.EINVAL('Path must be a string', path);
} else if (Path.isNull(path)) {
return new Errors.EINVAL('Path must be a string without null bytes.', path);
} else if (!allowRelative && !Path.isAbsolute(path)) {
return new Errors.EINVAL('Path must be absolute.', path);
}
}
function processPathArg(args, idx, allowRelative) {
let path = args[idx];
path = toPathIfFileURL(path);
path = toPathIfBuffer(path);
// Some methods specifically allow for rel paths (eg symlink with srcPath)
let err = validatePath(path, allowRelative);
if (err) {
throw err;
}
// Overwrite path arg with converted and validated path
args[idx] = path;
}
/**
* FileSystem
@ -138,14 +78,14 @@ function FileSystem(options, callback) {
options = options || {};
callback = callback || defaultCallback;
const flags = options.flags || [];
const guid = options.guid ? options.guid : defaultGuidFn;
const provider = options.provider || new providers.Default(options.name || FILE_SYSTEM_NAME);
var flags = options.flags;
var guid = options.guid ? options.guid : defaultGuidFn;
var provider = options.provider || new providers.Default(options.name || FILE_SYSTEM_NAME);
// If we're given a provider, match its name unless we get an explicit name
const name = options.name || provider.name;
const forceFormatting = flags.includes(FS_FORMAT);
var name = options.name || provider.name;
var forceFormatting = _(flags).contains(FS_FORMAT);
const fs = this;
var fs = this;
fs.readyState = FS_PENDING;
fs.name = name;
fs.error = null;
@ -154,25 +94,33 @@ function FileSystem(options, callback) {
fs.stdout = STDOUT;
fs.stderr = STDERR;
// Expose Node's fs.constants to users
fs.constants = fsConstants;
// Node also forwards the access mode flags onto fs
fs.F_OK = fsConstants.F_OK;
fs.R_OK = fsConstants.R_OK;
fs.W_OK = fsConstants.W_OK;
fs.X_OK = fsConstants.X_OK;
// Expose Shell constructor
this.Shell = Shell.bind(undefined, this);
// Safely expose the operation queue
let queue = [];
this.queueOrRun = function (operation) {
let error;
// Safely expose the list of open files and file
// descriptor management functions
var openFiles = {};
var nextDescriptor = FIRST_DESCRIPTOR;
Object.defineProperty(this, "openFiles", {
get: function() { return openFiles; }
});
this.allocDescriptor = function(openFileDescription) {
var fd = nextDescriptor ++;
openFiles[fd] = openFileDescription;
return fd;
};
this.releaseDescriptor = function(fd) {
delete openFiles[fd];
};
if (FS_READY === fs.readyState) {
// Safely expose the operation queue
var queue = [];
this.queueOrRun = function(operation) {
var error;
if(FS_READY == fs.readyState) {
operation.call(fs);
} else if (FS_ERROR === fs.readyState) {
} else if(FS_ERROR == fs.readyState) {
error = new Errors.EFILESYSTEMERROR('unknown error');
} else {
queue.push(operation);
@ -189,7 +137,7 @@ function FileSystem(options, callback) {
// We support the optional `options` arg from node, but ignore it
this.watch = function(filename, options, listener) {
if (Path.isNull(filename)) {
if(isNullPath(filename)) {
throw new Error('Path must be a string without null bytes.');
}
if(typeof options === 'function') {
@ -199,7 +147,7 @@ function FileSystem(options, callback) {
options = options || {};
listener = listener || nop;
const watcher = new FSWatcher();
var watcher = new FSWatcher();
watcher.start(filename, false, options.recursive);
watcher.on('change', listener);
@ -210,14 +158,14 @@ function FileSystem(options, callback) {
function wrappedGuidFn(context) {
return function(callback) {
// Skip the duplicate ID check if asked to
if (flags.includes(FS_NODUPEIDCHECK)) {
if(_(flags).contains(FS_NODUPEIDCHECK)) {
callback(null, guid());
return;
}
// Otherwise (default) make sure this id is unused first
function guidWithCheck(callback) {
const id = guid();
var id = guid();
context.getObject(id, function(err, value) {
if(err) {
callback(err);
@ -242,7 +190,7 @@ function FileSystem(options, callback) {
if(!changes.length) {
return;
}
const intercom = Intercom.getInstance();
var intercom = Intercom.getInstance();
changes.forEach(function(change) {
intercom.emit(change.event, change.path);
});
@ -252,15 +200,14 @@ function FileSystem(options, callback) {
provider.open(function(err) {
function complete(error) {
function wrappedContext(methodName) {
let context = provider[methodName]();
context.name = name;
var context = provider[methodName]();
context.flags = flags;
context.changes = [];
context.guid = wrappedGuidFn(context);
// When the context is finished, let the fs deal with any change events
context.close = function() {
let changes = context.changes;
var changes = context.changes;
broadcastChanges(changes);
changes.length = 0;
};
@ -294,7 +241,7 @@ function FileSystem(options, callback) {
return complete(err);
}
const context = provider.getReadWriteContext();
var context = provider.getReadWriteContext();
context.guid = wrappedGuidFn(context);
// Mount the filesystem, formatting if necessary
@ -311,92 +258,64 @@ function FileSystem(options, callback) {
impl.ensureRootDirectory(context, complete);
}
});
FileSystem.prototype.promises = {};
}
// Expose storage providers on FileSystem constructor
FileSystem.providers = providers;
/**
* Public API for FileSystem. All node.js methods that are exposed on fs.promises
* include `promise: true`. We also include our own extra methods, but skip the
* fd versions to match node.js, which puts these on a `FileHandle` object.
* Any method that deals with path argument(s) also includes the position of
* those args in one of `absPathArgs: [...]` or `relPathArgs: [...]`, so they
* can be processed and validated before being passed on to the method.
* Public API for FileSystem
*/
[
{ name: 'appendFile', promises: true, absPathArgs: [0] },
{ name: 'access', promises: true, absPathArgs: [0] },
{ name: 'chown', promises: true, absPathArgs: [0] },
{ name: 'chmod', promises: true, absPathArgs: [0] },
{ name: 'close' },
// copyFile - https://github.com/filerjs/filer/issues/436
{ name: 'exists', absPathArgs: [0] },
{ name: 'fchown' },
{ name: 'fchmod' },
// fdatasync - https://github.com/filerjs/filer/issues/653
{ name: 'fgetxattr' },
{ name: 'fremovexattr' },
{ name: 'fsetxattr' },
{ name: 'fstat' },
{ name: 'fsync' },
{ name: 'ftruncate' },
{ name: 'futimes' },
{ name: 'getxattr', promises: true, absPathArgs: [0] },
// lchown - https://github.com/filerjs/filer/issues/620
// lchmod - https://github.com/filerjs/filer/issues/619
{ name: 'link', promises: true, absPathArgs: [0, 1] },
{ name: 'lseek' },
{ name: 'lstat', promises: true },
{ name: 'mkdir', promises: true, absPathArgs: [0] },
{ name: 'mkdtemp', promises: true },
{ name: 'mknod', promises: true, absPathArgs: [0] },
{ name: 'open', promises: true, absPathArgs: [0] },
{ name: 'readdir', promises: true, absPathArgs: [0] },
{ name: 'read' },
{ name: 'readFile', promises: true, absPathArgs: [0] },
{ name: 'readlink', promises: true, absPathArgs: [0] },
// realpath - https://github.com/filerjs/filer/issues/85
{ name: 'removexattr', promises: true, absPathArgs: [0] },
{ name: 'rename', promises: true, absPathArgs: [0, 1] },
{ name: 'rmdir', promises: true, absPathArgs: [0] },
{ name: 'setxattr', promises: true, absPathArgs: [0] },
{ name: 'stat', promises: true, absPathArgs: [0] },
{ name: 'symlink', promises: true, relPathArgs: [0], absPathArgs: [1] },
{ name: 'truncate', promises: true, absPathArgs: [0] },
// unwatchFile - https://github.com/filerjs/filer/pull/553
{ name: 'unlink', promises: true, absPathArgs: [0] },
{ name: 'utimes', promises: true, absPathArgs: [0] },
// watch - implemented above in `this.watch`
// watchFile - https://github.com/filerjs/filer/issues/654
{ name: 'writeFile', promises: true, absPathArgs: [0] },
{ name: 'write' }
].forEach(function (method) {
const methodName = method.name;
const shouldPromisify = method.promises === true;
'open',
'close',
'mknod',
'mkdir',
'rmdir',
'stat',
'fstat',
'link',
'unlink',
'read',
'readFile',
'write',
'writeFile',
'appendFile',
'exists',
'lseek',
'readdir',
'rename',
'readlink',
'symlink',
'lstat',
'truncate',
'ftruncate',
'utimes',
'futimes',
'setxattr',
'getxattr',
'fsetxattr',
'fgetxattr',
'removexattr',
'fremovexattr'
].forEach(function(methodName) {
FileSystem.prototype[methodName] = function() {
const fs = this;
const args = Array.prototype.slice.call(arguments, 0);
const lastArgIndex = args.length - 1;
var fs = this;
var args = Array.prototype.slice.call(arguments, 0);
var lastArgIndex = args.length - 1;
// We may or may not get a callback, and since node.js supports
// fire-and-forget style fs operations, we have to dance a bit here.
const missingCallback = typeof args[lastArgIndex] !== 'function';
const callback = maybeCallback(args[lastArgIndex]);
var missingCallback = typeof args[lastArgIndex] !== 'function';
var callback = maybeCallback(args[lastArgIndex]);
// Deal with path arguments, validating and normalizing Buffer and file:// URLs
if (method.absPathArgs) {
method.absPathArgs.forEach(pathArg => processPathArg(args, pathArg, false));
}
if (method.relPathArgs) {
method.relPathArgs.forEach(pathArg => processPathArg(args, pathArg, true));
}
const error = fs.queueOrRun(function () {
const context = fs.provider.openReadWriteContext();
var error = fs.queueOrRun(function() {
var context = fs.provider.openReadWriteContext();
// Fail early if the filesystem is in an error state (e.g.,
// provider failed to open).
if(FS_ERROR === fs.readyState) {
const err = new Errors.EFILESYSTEMERROR('filesystem unavailable, operation canceled');
var err = new Errors.EFILESYSTEMERROR('filesystem unavailable, operation canceled');
return callback.call(fs, err);
}
@ -416,23 +335,13 @@ function FileSystem(options, callback) {
// Forward this call to the impl's version, using the following
// call signature, with complete() as the callback/last-arg now:
// fn(fs, context, arg0, arg1, ... , complete);
const fnArgs = [context].concat(args);
var fnArgs = [fs, context].concat(args);
impl[methodName].apply(null, fnArgs);
});
if(error) {
callback(error);
}
};
// Add to fs.promises if appropriate
if (shouldPromisify) {
FileSystem.prototype.promises[methodName] = promisify(FileSystem.prototype[methodName].bind(fs));
}
});
}
// Expose storage providers on FileSystem constructor
FileSystem.providers = providers;
module.exports = FileSystem;
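// Illustrative usage (editor's sketch, not part of the original file), based on
// the callback and promise wiring shown on the master side of this diff:
//   var Filer = require('filer');
//   var fs = new Filer.FileSystem({ provider: new Filer.FileSystem.providers.Memory() });
//   fs.writeFile('/hello.txt', 'world', function(err) {
//     if (err) throw err;
//     fs.promises.readFile('/hello.txt', 'utf8').then(function(data) {
//       console.log(data); // 'world'
//     });
//   });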

View File

@ -1,8 +1,6 @@
'use strict';
const EventEmitter = require('../lib/eventemitter.js');
const Path = require('./path.js');
const Intercom = require('../lib/intercom.js');
var EventEmitter = require('../lib/eventemitter.js');
var Path = require('./path.js');
var Intercom = require('../lib/intercom.js');
/**
* FSWatcher based on node.js' FSWatcher
@ -10,10 +8,10 @@ const Intercom = require('../lib/intercom.js');
*/
function FSWatcher() {
EventEmitter.call(this);
const self = this;
let recursive = false;
let recursivePathPrefix;
let filename;
var self = this;
var recursive = false;
var recursivePathPrefix;
var filename;
function onchange(path) {
// Watch for exact filename, or parent path when recursive is true.
@ -48,12 +46,12 @@ function FSWatcher() {
recursivePathPrefix = filename === '/' ? '/' : filename + '/';
}
const intercom = Intercom.getInstance();
var intercom = Intercom.getInstance();
intercom.on('change', onchange);
};
self.close = function() {
const intercom = Intercom.getInstance();
var intercom = Intercom.getInstance();
intercom.off('change', onchange);
self.removeAllListeners('change');
};

View File

@ -1,34 +1,7 @@
let fs = null;
let Filer = null;
module.exports = Filer = {
module.exports = {
FileSystem: require('./filesystem/interface.js'),
Buffer: Buffer,
// We previously called this Path, but node calls it path. Do both
Buffer: require('./buffer.js'),
Path: require('./path.js'),
path: require('./path.js'),
Errors: require('./errors.js'),
Shell: require('./shell/shell.js'),
/**
* @deprecated Importing filer from your webpack config is not recommended.
*
* The filer `FilerWebpackPlugin` class is exposed directly.
*
* ```
* const { FilerWebpackPlugin } = require('filer/webpack');
* ```
*/
FilerWebpackPlugin: require('./webpack-plugin'),
Shell: require('./shell/shell.js')
};
// Add a getter for the `fs` instance, which returns
// a Filer FileSystem instance, using the default provider/flags.
Object.defineProperty(Filer, 'fs', {
enumerable: true,
get() {
if(!fs) {
fs = new Filer.FileSystem();
}
return fs;
}
});

View File

@ -1,60 +1,10 @@
const {
NODE_TYPE_FILE,
NODE_TYPE_DIRECTORY,
NODE_TYPE_SYMBOLIC_LINK,
DEFAULT_FILE_PERMISSIONS,
DEFAULT_DIR_PERMISSIONS
} = require('./constants');
const {
S_IFREG,
S_IFDIR,
S_IFLNK
} = require('./constants').fsConstants;
var MODE_FILE = require('./constants.js').MODE_FILE;
/**
* Make sure the options object has an id on property,
* either from caller or one we generate using supplied guid fn.
*/
function ensureID(options, prop, callback) {
if(options[prop]) {
return callback();
}
options.guid(function(err, id) {
if(err) {
return callback(err);
}
options[prop] = id;
callback();
});
}
/**
* Generate a POSIX mode (integer) for the node type and permissions.
* Use default permissions if we aren't passed any.
*/
function generateMode(nodeType, modePermissions) {
switch(nodeType) {
case NODE_TYPE_DIRECTORY:
return (modePermissions || DEFAULT_DIR_PERMISSIONS) | S_IFDIR;
case NODE_TYPE_SYMBOLIC_LINK:
return (modePermissions || DEFAULT_FILE_PERMISSIONS) | S_IFLNK;
case NODE_TYPE_FILE:
// falls through
default:
return (modePermissions || DEFAULT_FILE_PERMISSIONS) | S_IFREG;
}
}
/**
* Common properties for the layout of a Node
*/
class Node {
constructor(options) {
function Node(options) {
var now = Date.now();
this.id = options.id;
this.data = options.data; // id for data object
this.mode = options.mode || MODE_FILE; // node type (file, directory, etc)
this.size = options.size || 0; // size (bytes for files, entries for directories)
this.atime = options.atime || now; // access time (will mirror ctime after creation)
this.ctime = options.ctime || now; // creation/change time
@ -62,74 +12,42 @@ class Node {
this.flags = options.flags || []; // file flags
this.xattrs = options.xattrs || {}; // extended attributes
this.nlinks = options.nlinks || 0; // links count
this.version = options.version || 0; // node version
this.blksize = undefined; // block size
this.nblocks = 1; // blocks count
this.data = options.data; // id for data object
}
// Historically, Filer's node layout has referred to the
// node type as `mode`, and done so using a String. In
// a POSIX filesystem, the mode is a number that combines
// both node type and permission bits. Internally we use `type`,
// but store it in the database as `mode` for backward
// compatibility.
if(typeof options.type === 'string') {
this.type = options.type;
} else if(typeof options.mode === 'string') {
this.type = options.mode;
// Make sure the options object has an id on property,
// either from caller or one we generate using supplied guid fn.
function ensureID(options, prop, callback) {
if(options[prop]) {
callback(null);
} else {
this.type = NODE_TYPE_FILE;
}
// Extra mode permissions and ownership info
this.permissions = options.permissions || generateMode(this.type);
this.uid = options.uid || 0x0; // owner name
this.gid = options.gid || 0x0; // group name
}
/**
* Serialize a Node to JSON. Everything is as expected except
* that we use `mode` for `type` to maintain backward compatibility.
*/
toJSON() {
return {
id: this.id,
data: this.data,
size: this.size,
atime: this.atime,
ctime: this.ctime,
mtime: this.ctime,
flags: this.flags,
xattrs: this.xattrs,
nlinks: this.nlinks,
// Use `mode` for `type` to keep backward compatibility
mode: this.type,
permissions: this.permissions,
uid: this.uid,
gid: this.gid
};
}
// Return complete POSIX `mode` for node type + permissions. See:
// http://man7.org/linux/man-pages/man2/chmod.2.html
get mode() {
return generateMode(this.type, this.permissions);
}
// When setting the `mode` we assume permissions bits only (not changing type)
set mode(value) {
this.permissions = value;
options.guid(function(err, id) {
options[prop] = id;
callback(err);
});
}
}
module.exports.create = function create(options, callback) {
Node.create = function(options, callback) {
// We expect both options.id and options.data to be provided/generated.
ensureID(options, 'id', function(err) {
if(err) {
return callback(err);
callback(err);
return;
}
ensureID(options, 'data', function(err) {
if(err) {
return callback(err);
callback(err);
return;
}
callback(null, new Node(options));
});
});
};
module.exports = Node;
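// Illustrative values (editor's sketch, not part of the original file): how
// generateMode() on the master side of this diff combines type and permission
// bits, using the values in src/constants.js.
//   generateMode(NODE_TYPE_FILE);       // => 33188 (S_IFREG | 0o644, i.e. 0o100644)
//   generateMode(NODE_TYPE_DIRECTORY);  // => 16877 (S_IFDIR | 0o755, i.e. 0o040755)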

View File

@ -1,5 +1,4 @@
const Errors = require('./errors.js');
const Node = require('./node');
var Errors = require('./errors.js');
function OpenFileDescription(path, id, flags, position) {
this.path = path;
@ -23,7 +22,7 @@ OpenFileDescription.prototype.getNode = function(context, callback) {
return callback(new Errors.EBADF('file descriptor refers to unknown node', path));
}
Node.create(node, callback);
callback(null, node);
}
context.getObject(id, check_if_node_exists);

View File

@ -1,44 +0,0 @@
const { FIRST_DESCRIPTOR } = require('./constants');
const openFiles = {};
/**
* Start at FIRST_DESCRIPTOR and go until we find
* an empty file descriptor, then return it.
*/
const getEmptyDescriptor = () => {
let fd = FIRST_DESCRIPTOR;
while(getOpenFileDescription(fd)) {
fd++;
}
return fd;
};
/**
* Look up the open file description object for a given
* file descriptor.
*/
const getOpenFileDescription = ofd => openFiles[ofd];
/**
* Allocate a new file descriptor for the given
* open file description.
*/
const allocDescriptor = openFileDescription => {
const ofd = getEmptyDescriptor();
openFiles[ofd] = openFileDescription;
return ofd;
};
/**
* Release the given existing file descriptor created
* with allocDescriptor().
*/
const releaseDescriptor = ofd => delete openFiles[ofd];
module.exports = {
allocDescriptor,
releaseDescriptor,
getOpenFileDescription
};
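// Illustrative usage (editor's sketch, not part of the original file):
// descriptors start at FIRST_DESCRIPTOR and the lowest free slot is reused.
//   var openFiles = require('./src/open-files.js');
//   var ofd = { path: '/tmp/a', flags: ['r'] };     // stand-in open file description
//   var fd = openFiles.allocDescriptor(ofd);        // lowest free fd
//   openFiles.getOpenFileDescription(fd) === ofd;   // => true
//   openFiles.releaseDescriptor(fd);                // fd becomes reusable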

View File

@ -1,49 +1,239 @@
/**
* Patch process to add process.cwd(), always giving the root dir.
* NOTE: this line needs to happen *before* we require in `path`.
*/
process.cwd = () => '/';
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
/**
* https://github.com/browserify/path-browserify via Parcel.
* We use is as a base for our own Filer.Path, and patch/add
* a few things we need for the browser environment.
*/
const nodePath = require('path');
const filerPath = Object.assign({}, nodePath);
// Based on https://github.com/joyent/node/blob/41e53e557992a7d552a8e23de035f9463da25c99/lib/path.js
/**
* Patch path.basename() to return / vs. ''
*/
filerPath.basename = (path, ext) => {
const basename = nodePath.basename(path, ext);
return basename === '' ? '/' : basename;
// resolves . and .. elements in a path array with directory names there
// must be no slashes, empty elements, or device names (c:\) in the array
// (so also no leading and trailing slashes - it does not distinguish
// relative and absolute paths)
function normalizeArray(parts, allowAboveRoot) {
// if the path tries to go above the root, `up` ends up > 0
var up = 0;
for (var i = parts.length - 1; i >= 0; i--) {
var last = parts[i];
if (last === '.') {
parts.splice(i, 1);
} else if (last === '..') {
parts.splice(i, 1);
up++;
} else if (up) {
parts.splice(i, 1);
up--;
}
}
// if the path is allowed to go above the root, restore leading ..s
if (allowAboveRoot) {
for (; up--; up) {
parts.unshift('..');
}
}
return parts;
}
// Split a filename into [root, dir, basename, ext], unix version
// 'root' is just a slash, or nothing.
var splitPathRe =
/^(\/?)([\s\S]+\/(?!$)|\/)?((?:\.{1,2}$|[\s\S]+?)?(\.[^.\/]*)?)$/;
var splitPath = function(filename) {
var result = splitPathRe.exec(filename);
return [result[1] || '', result[2] || '', result[3] || '', result[4] || ''];
};
/**
* Patch path.normalize() to not add a trailing /
*/
filerPath.normalize = (path) => {
path = nodePath.normalize(path);
return path === '/' ? path : filerPath.removeTrailing(path);
};
// path.resolve([from ...], to)
function resolve() {
var resolvedPath = '',
resolvedAbsolute = false;
/**
* Add new utility method isNull() to path: check for null paths.
*/
filerPath.isNull = path => ('' + path).indexOf('\u0000') !== -1;
for (var i = arguments.length - 1; i >= -1 && !resolvedAbsolute; i--) {
// XXXfiler: we don't have process.cwd() so we use '/' as a fallback
var path = (i >= 0) ? arguments[i] : '/';
/**
* Add new utility method addTrailing() to add trailing / without doubling to //.
*/
filerPath.addTrailing = path => path.replace(/\/*$/, '/');
// Skip empty and invalid entries
if (typeof path !== 'string' || !path) {
continue;
}
/**
* Add new utility method removeTrailing() to remove trailing /, dealing with multiple
resolvedPath = path + '/' + resolvedPath;
resolvedAbsolute = path.charAt(0) === '/';
}
// At this point the path should be resolved to a full absolute path, but
// handle relative paths to be safe (might happen when process.cwd() fails)
// Normalize the path
resolvedPath = normalizeArray(resolvedPath.split('/').filter(function(p) {
return !!p;
}), !resolvedAbsolute).join('/');
return ((resolvedAbsolute ? '/' : '') + resolvedPath) || '.';
}
// path.normalize(path)
function normalize(path) {
var isAbsolute = path.charAt(0) === '/',
trailingSlash = path.substr(-1) === '/';
// Normalize the path
path = normalizeArray(path.split('/').filter(function(p) {
return !!p;
}), !isAbsolute).join('/');
if (!path && !isAbsolute) {
path = '.';
}
/*
if (path && trailingSlash) {
path += '/';
}
*/
filerPath.removeTrailing = path => {
return (isAbsolute ? '/' : '') + path;
}
function join() {
var paths = Array.prototype.slice.call(arguments, 0);
return normalize(paths.filter(function(p, index) {
return p && typeof p === 'string';
}).join('/'));
}
// path.relative(from, to)
function relative(from, to) {
from = resolve(from).substr(1);
to = resolve(to).substr(1);
function trim(arr) {
var start = 0;
for (; start < arr.length; start++) {
if (arr[start] !== '') break;
}
var end = arr.length - 1;
for (; end >= 0; end--) {
if (arr[end] !== '') break;
}
if (start > end) return [];
return arr.slice(start, end - start + 1);
}
var fromParts = trim(from.split('/'));
var toParts = trim(to.split('/'));
var length = Math.min(fromParts.length, toParts.length);
var samePartsLength = length;
for (var i = 0; i < length; i++) {
if (fromParts[i] !== toParts[i]) {
samePartsLength = i;
break;
}
}
var outputParts = [];
for (var i = samePartsLength; i < fromParts.length; i++) {
outputParts.push('..');
}
outputParts = outputParts.concat(toParts.slice(samePartsLength));
return outputParts.join('/');
}
function dirname(path) {
var result = splitPath(path),
root = result[0],
dir = result[1];
if (!root && !dir) {
// No dirname whatsoever
return '.';
}
if (dir) {
// It has a dirname, strip trailing slash
dir = dir.substr(0, dir.length - 1);
}
return root + dir;
}
function basename(path, ext) {
var f = splitPath(path)[2];
// TODO: make this comparison case-insensitive on windows?
if (ext && f.substr(-1 * ext.length) === ext) {
f = f.substr(0, f.length - ext.length);
}
// XXXfiler: node.js just does `return f`
return f === "" ? "/" : f;
}
function extname(path) {
return splitPath(path)[3];
}
function isAbsolute(path) {
if(path.charAt(0) === '/') {
return true;
}
return false;
}
function isNull(path) {
if (('' + path).indexOf('\u0000') !== -1) {
return true;
}
return false;
}
// Make sure we don't double-add a trailing slash (e.g., '/' -> '//')
function addTrailing(path) {
return path.replace(/\/*$/, '/');
}
// Deal with multiple slashes at the end, one, or none
// and make sure we don't return the empty string.
function removeTrailing(path) {
path = path.replace(/\/*$/, '');
return path === '' ? '/' : path;
};
}
module.exports = filerPath;
// XXXfiler: we don't support path.exists() or path.existsSync(), which
// are deprecated, and need a FileSystem instance to work. Use fs.stat().
module.exports = {
normalize: normalize,
resolve: resolve,
join: join,
relative: relative,
sep: '/',
delimiter: ':',
dirname: dirname,
basename: basename,
extname: extname,
isAbsolute: isAbsolute,
isNull: isNull,
// Non-node but useful...
addTrailing: addTrailing,
removeTrailing: removeTrailing
};

View File

@ -1,8 +1,35 @@
const IndexedDB = require('./indexeddb.js');
const Memory = require('./memory.js');
var IndexedDB = require('./indexeddb.js');
var WebSQL = require('./websql.js');
var Memory = require('./memory.js');
module.exports = {
IndexedDB: IndexedDB,
WebSQL: WebSQL,
Memory: Memory,
/**
* Convenience Provider references
*/
// The default provider to use when none is specified
Default: IndexedDB,
Memory: Memory
// The Fallback provider does automatic fallback checks
Fallback: (function() {
if(IndexedDB.isSupported()) {
return IndexedDB;
}
if(WebSQL.isSupported()) {
return WebSQL;
}
function NotSupported() {
throw "[Filer Error] Your browser doesn't support IndexedDB or WebSQL.";
}
NotSupported.isSupported = function() {
return false;
};
return NotSupported;
}())
};
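// Illustrative usage (editor's sketch, not part of the original file): picking
// a provider on the WebSQL-era branch shown here.
//   var Filer = require('filer');
//   var providers = require('./src/providers/index.js');
//   var Provider = providers.Fallback;   // IndexedDB if available, else WebSQL, else a stub that throws
//   var fs = new Filer.FileSystem({ provider: new Provider('my-filesystem') });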

View File

@ -2,6 +2,13 @@ var FILE_SYSTEM_NAME = require('../constants.js').FILE_SYSTEM_NAME;
var FILE_STORE_NAME = require('../constants.js').FILE_STORE_NAME;
var IDB_RW = require('../constants.js').IDB_RW;
var IDB_RO = require('../constants.js').IDB_RO;
var Errors = require('../errors.js');
var FilerBuffer = require('../buffer.js');
var indexedDB = global.indexedDB ||
global.mozIndexedDB ||
global.webkitIndexedDB ||
global.msIndexedDB;
function IndexedDBContext(db, mode) {
this.db = db;
@ -58,7 +65,7 @@ IndexedDBContext.prototype.getBuffer = function(key, callback) {
if(err) {
return callback(err);
}
callback(null, Buffer.from(arrayBuffer));
callback(null, new FilerBuffer(arrayBuffer));
});
};
@ -82,7 +89,12 @@ IndexedDBContext.prototype.putObject = function(key, value, callback) {
this._put(key, value, callback);
};
IndexedDBContext.prototype.putBuffer = function(key, uint8BackedBuffer, callback) {
var buf = uint8BackedBuffer.buffer;
var buf;
if(!Buffer._useTypedArrays) { // workaround for fxos 1.3
buf = uint8BackedBuffer.toArrayBuffer();
} else {
buf = uint8BackedBuffer.buffer;
}
this._put(key, buf, callback);
};
@ -109,10 +121,6 @@ function IndexedDB(name) {
this.db = null;
}
IndexedDB.isSupported = function() {
var indexedDB = global.indexedDB ||
global.mozIndexedDB ||
global.webkitIndexedDB ||
global.msIndexedDB;
return !!indexedDB;
};
@ -125,11 +133,6 @@ IndexedDB.prototype.open = function(callback) {
}
try {
var indexedDB = global.indexedDB ||
global.mozIndexedDB ||
global.webkitIndexedDB ||
global.msIndexedDB;
// NOTE: we're not using versioned databases.
var openRequest = indexedDB.open(that.name);

View File

@ -9,7 +9,7 @@ var asyncCallback = require('../../lib/async.js').setImmediate;
var createDB = (function() {
var pool = {};
return function getOrCreate(name) {
if(!Object.prototype.hasOwnProperty.call(pool, name)) {
if(!pool.hasOwnProperty(name)) {
pool[name] = {};
}
return pool[name];
@ -24,7 +24,7 @@ function MemoryContext(db, readOnly) {
MemoryContext.prototype.clear = function(callback) {
if(this.readOnly) {
asyncCallback(function() {
callback('[MemoryContext] Error: write operation on read only context');
callback("[MemoryContext] Error: write operation on read only context");
});
return;
}
@ -49,7 +49,7 @@ MemoryContext.prototype.putBuffer =
function(key, value, callback) {
if(this.readOnly) {
asyncCallback(function() {
callback('[MemoryContext] Error: write operation on read only context');
callback("[MemoryContext] Error: write operation on read only context");
});
return;
}
@ -60,7 +60,7 @@ function(key, value, callback) {
MemoryContext.prototype.delete = function(key, callback) {
if(this.readOnly) {
asyncCallback(function() {
callback('[MemoryContext] Error: write operation on read only context');
callback("[MemoryContext] Error: write operation on read only context");
});
return;
}

171
src/providers/websql.js Normal file
View File

@ -0,0 +1,171 @@
var FILE_SYSTEM_NAME = require('../constants.js').FILE_SYSTEM_NAME;
var FILE_STORE_NAME = require('../constants.js').FILE_STORE_NAME;
var WSQL_VERSION = require('../constants.js').WSQL_VERSION;
var WSQL_SIZE = require('../constants.js').WSQL_SIZE;
var WSQL_DESC = require('../constants.js').WSQL_DESC;
var Errors = require('../errors.js');
var FilerBuffer = require('../buffer.js');
var base64ArrayBuffer = require('base64-arraybuffer');
function WebSQLContext(db, isReadOnly) {
var that = this;
this.getTransaction = function(callback) {
if(that.transaction) {
callback(that.transaction);
return;
}
// Either do readTransaction() (read-only) or transaction() (read/write)
db[isReadOnly ? 'readTransaction' : 'transaction'](function(transaction) {
that.transaction = transaction;
callback(transaction);
});
};
}
WebSQLContext.prototype.clear = function(callback) {
function onError(transaction, error) {
callback(error);
}
function onSuccess(transaction, result) {
callback(null);
}
this.getTransaction(function(transaction) {
transaction.executeSql("DELETE FROM " + FILE_STORE_NAME + ";",
[], onSuccess, onError);
});
};
function _get(getTransaction, key, callback) {
function onSuccess(transaction, result) {
// If the key isn't found, return null
var value = result.rows.length === 0 ? null : result.rows.item(0).data;
callback(null, value);
}
function onError(transaction, error) {
callback(error);
}
getTransaction(function(transaction) {
transaction.executeSql("SELECT data FROM " + FILE_STORE_NAME + " WHERE id = ? LIMIT 1;",
[key], onSuccess, onError);
});
}
WebSQLContext.prototype.getObject = function(key, callback) {
_get(this.getTransaction, key, function(err, result) {
if(err) {
return callback(err);
}
try {
if(result) {
result = JSON.parse(result);
}
} catch(e) {
return callback(e);
}
callback(null, result);
});
};
WebSQLContext.prototype.getBuffer = function(key, callback) {
_get(this.getTransaction, key, function(err, result) {
if(err) {
return callback(err);
}
// Deal with zero-length ArrayBuffers, which will be encoded as ''
if(result || result === '') {
var arrayBuffer = base64ArrayBuffer.decode(result);
result = new FilerBuffer(arrayBuffer);
}
callback(null, result);
});
};
function _put(getTransaction, key, value, callback) {
function onSuccess(transaction, result) {
callback(null);
}
function onError(transaction, error) {
callback(error);
}
getTransaction(function(transaction) {
transaction.executeSql("INSERT OR REPLACE INTO " + FILE_STORE_NAME + " (id, data) VALUES (?, ?);",
[key, value], onSuccess, onError);
});
}
WebSQLContext.prototype.putObject = function(key, value, callback) {
var json = JSON.stringify(value);
_put(this.getTransaction, key, json, callback);
};
WebSQLContext.prototype.putBuffer = function(key, uint8BackedBuffer, callback) {
var base64 = base64ArrayBuffer.encode(uint8BackedBuffer.buffer);
_put(this.getTransaction, key, base64, callback);
};
WebSQLContext.prototype.delete = function(key, callback) {
function onSuccess(transaction, result) {
callback(null);
}
function onError(transaction, error) {
callback(error);
}
this.getTransaction(function(transaction) {
transaction.executeSql("DELETE FROM " + FILE_STORE_NAME + " WHERE id = ?;",
[key], onSuccess, onError);
});
};
function WebSQL(name) {
this.name = name || FILE_SYSTEM_NAME;
this.db = null;
}
WebSQL.isSupported = function() {
return !!global.openDatabase;
};
WebSQL.prototype.open = function(callback) {
var that = this;
// Bail if we already have a db open
if(that.db) {
return callback();
}
var db = global.openDatabase(that.name, WSQL_VERSION, WSQL_DESC, WSQL_SIZE);
if(!db) {
callback("[WebSQL] Unable to open database.");
return;
}
function onError(transaction, error) {
if (error.code === 5) {
callback(new Errors.EINVAL('WebSQL cannot be accessed. If private browsing is enabled, disable it.'));
}
callback(error);
}
function onSuccess(transaction, result) {
that.db = db;
callback();
}
// Create the table and index we'll need to store the fs data.
db.transaction(function(transaction) {
function createIndex(transaction) {
transaction.executeSql("CREATE INDEX IF NOT EXISTS idx_" + FILE_STORE_NAME + "_id" +
" on " + FILE_STORE_NAME + " (id);",
[], onSuccess, onError);
}
transaction.executeSql("CREATE TABLE IF NOT EXISTS " + FILE_STORE_NAME + " (id unique, data TEXT);",
[], createIndex, onError);
});
};
WebSQL.prototype.getReadOnlyContext = function() {
return new WebSQLContext(this.db, true);
};
WebSQL.prototype.getReadWriteContext = function() {
return new WebSQLContext(this.db, false);
};
module.exports = WebSQL;
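
The new WebSQL provider follows the same shape as the other providers: feature-detect with `isSupported()`, construct it with a database name, and hand it to `Filer.FileSystem`. A usage sketch (the database name and the fallback choice are illustrative):

```
var Filer = require('filer');

var providers = Filer.FileSystem.providers;

// Prefer WebSQL where it exists (older WebKit browsers), otherwise fall back.
var provider = providers.WebSQL.isSupported()
  ? new providers.WebSQL('demo-fs')
  : new providers.Memory('demo-fs');

var fs = new Filer.FileSystem({ provider: provider }, function(err) {
  if (err) throw err;

  fs.writeFile('/hello.txt', 'stored via the selected provider', function(err) {
    if (err) throw err;

    fs.readFile('/hello.txt', 'utf8', function(err, data) {
      if (err) throw err;
      console.log(data);
    });
  });
});
```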

View File

@ -1,27 +1,26 @@
function generateRandom(template) {
return template.replace(/[xy]/g, function(c) {
var r = Math.random()*16|0, v = c === 'x' ? r : (r&0x3|0x8);
return v.toString(16);
});
}
function guid() {
return generateRandom('xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx').toUpperCase();
}
/**
* Generate a string of n random characters. Defaults to n=6.
*/
function randomChars(n) {
n = n || 6;
var template = 'x'.repeat(n);
return generateRandom(template);
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) {
var r = Math.random()*16|0, v = c == 'x' ? r : (r&0x3|0x8);
return v.toString(16);
}).toUpperCase();
}
function nop() {}
/**
* Convert a Uint8Array to a regular array
*/
function u8toArray(u8) {
var array = [];
var len = u8.length;
for(var i = 0; i < len; i++) {
array[i] = u8[i];
}
return array;
}
module.exports = {
guid: guid,
nop: nop,
randomChars: randomChars
u8toArray: u8toArray,
nop: nop
};
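
For reference, the master side of this hunk factors the guid logic into `generateRandom()` and adds `randomChars(n)`, while the other branch keeps only `guid()` plus `u8toArray()`. A quick sketch of the master-side exports, assuming it is required from a sibling module in `src/`:

```
var shared = require('./shared.js');

console.log(shared.guid());         // e.g. '9E2B7A4C-1D3F-4ABC-9F00-2C4D6E8A0B1C' (uppercase, v4-style)
console.log(shared.randomChars());  // six random hex characters, e.g. 'a91f3c'
console.log(shared.randomChars(12).length); // 12

shared.nop();                       // no-op placeholder callback
```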

View File

@ -1,5 +1,4 @@
'use strict';
const defaults = require('../constants.js').ENVIRONMENT;
var defaults = require('../constants.js').ENVIRONMENT;
module.exports = function Environment(env) {
env = env || {};

View File

@ -1,8 +1,8 @@
var {promisify} = require('es6-promisify');
var Path = require('../path.js');
var Errors = require('../errors.js');
var Environment = require('./environment.js');
var async = require('../../lib/async.js');
var Encoding = require('../encoding.js');
var minimatch = require('minimatch');
function Shell(fs, options) {
@ -57,24 +57,6 @@ function Shell(fs, options) {
this.pwd = function() {
return cwd;
};
this.promises = {};
/**
* Public API for Shell converted to Promise based
*/
[
'cd',
'exec',
'touch',
'cat',
'ls',
'rm',
'tempDir',
'mkdirp',
'find'
].forEach((methodName)=>{
this.promises[methodName] = promisify(this[methodName].bind(this));
});
}
/**
@ -105,7 +87,7 @@ Shell.prototype.exec = function(path, args, callback) {
callback = callback || function(){};
path = Path.resolve(sh.pwd(), path);
fs.readFile(path, 'utf8', function(error, data) {
fs.readFile(path, "utf8", function(error, data) {
if(error) {
callback(error);
return;
@ -149,7 +131,7 @@ Shell.prototype.touch = function(path, options, callback) {
fs.utimes(path, atime, mtime, callback);
}
fs.stat(path, function(error) {
fs.stat(path, function(error, stats) {
if(error) {
if(options.updateOnly === true) {
callback();
@ -251,10 +233,16 @@ Shell.prototype.ls = function(dir, options, callback) {
callback(error);
return;
}
var entry = stats;
var entry = {
path: Path.basename(name),
links: stats.nlinks,
size: stats.size,
modified: stats.mtime,
type: stats.type
};
if(options.recursive && stats.type === 'DIRECTORY') {
list(Path.join(pathname, entry.name), function(error, items) {
list(Path.join(pathname, entry.path), function(error, items) {
if(error) {
callback(error);
return;
@ -366,7 +354,7 @@ Shell.prototype.tempDir = function(callback) {
// Try and create it, and it will either work or fail
// but either way it's now there.
fs.mkdir(tmp, function() {
fs.mkdir(tmp, function(err) {
callback(null, tmp);
});
};
@ -387,8 +375,7 @@ Shell.prototype.mkdirp = function(path, callback) {
callback(new Errors.EINVAL('Missing path argument'));
return;
}
path = Path.resolve(sh.pwd(), path);
if (path === '/') {
else if (path === '/') {
callback();
return;
}
@ -412,7 +399,7 @@ Shell.prototype.mkdirp = function(path, callback) {
var parent = Path.dirname(path);
if(parent === '/') {
fs.mkdir(path, function (err) {
if (err && err.code !== 'EEXIST') {
if (err && err.code != 'EEXIST') {
callback(err);
return;
}
@ -424,7 +411,7 @@ Shell.prototype.mkdirp = function(path, callback) {
_mkdirp(parent, function (err) {
if (err) return callback(err);
fs.mkdir(path, function (err) {
if (err && err.code !== 'EEXIST') {
if (err && err.code != 'EEXIST') {
callback(err);
return;
}
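
Taken together, the Shell changes above leave the callback API common to both branches; on the master side each method is additionally promisified under `sh.promises`. A short sketch of the callback flow (the paths are illustrative):

```
var Filer = require('filer');

var fs = new Filer.FileSystem({ provider: new Filer.FileSystem.providers.Memory() });
var sh = new fs.Shell();

// Create nested directories, list them recursively, then remove the tree.
sh.mkdirp('/projects/filer/src', function(err) {
  if (err) throw err;

  sh.ls('/', { recursive: true }, function(err, listing) {
    if (err) throw err;
    console.log(JSON.stringify(listing, null, 2));

    sh.rm('/projects', { recursive: true }, function(err) {
      if (err) throw err;
      console.log('cleaned up');
    });
  });
});
```

On the master side the same steps can also be written as `sh.promises.mkdirp('/projects/filer/src').then(...)` and so on, per the promisified method list in the constructor above.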

View File

@ -1,43 +1,26 @@
'use strict';
var Constants = require('./constants.js');
const Constants = require('./constants.js');
const Path = require('./path.js');
function dateFromMs(ms) {
return new Date(Number(ms));
}
function Stats(path, fileNode, devName) {
this.dev = devName;
function Stats(fileNode, devName) {
this.node = fileNode.id;
this.type = fileNode.type;
this.dev = devName;
this.size = fileNode.size;
this.nlinks = fileNode.nlinks;
// Date objects
this.atime = dateFromMs(fileNode.atime);
this.mtime = dateFromMs(fileNode.mtime);
this.ctime = dateFromMs(fileNode.ctime);
// Unix timestamp MS Numbers
this.atimeMs = fileNode.atime;
this.mtimeMs = fileNode.mtime;
this.ctimeMs = fileNode.ctime;
this.version = fileNode.version;
this.mode = fileNode.mode;
this.uid = fileNode.uid;
this.gid = fileNode.gid;
this.name = Path.basename(path);
this.atime = fileNode.atime;
this.mtime = fileNode.mtime;
this.ctime = fileNode.ctime;
this.type = fileNode.mode;
}
Stats.prototype.isFile = function() {
return this.type === Constants.NODE_TYPE_FILE;
return this.type === Constants.MODE_FILE;
};
Stats.prototype.isDirectory = function() {
return this.type === Constants.NODE_TYPE_DIRECTORY;
return this.type === Constants.MODE_DIRECTORY;
};
Stats.prototype.isSymbolicLink = function() {
return this.type === Constants.NODE_TYPE_SYMBOLIC_LINK;
return this.type === Constants.MODE_SYMBOLIC_LINK;
};
// These will always be false in Filer.
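
The Stats shape differs between the two branches (master adds `name`, the numeric `*Ms` timestamps, and `NODE_TYPE_*` checks; the other branch keeps raw `atime`/`mtime`/`ctime` and `MODE_*`), but the method-based checks behave the same on both. A small sketch:

```
var Filer = require('filer');

var fs = new Filer.FileSystem({ provider: new Filer.FileSystem.providers.Memory() });

fs.writeFile('/report.txt', 'hello', function(err) {
  if (err) throw err;

  fs.stat('/report.txt', function(err, stats) {
    if (err) throw err;

    // Stable across both versions of Stats:
    console.log(stats.isFile());      // true
    console.log(stats.isDirectory()); // false
    console.log(stats.size);          // 5
  });
});
```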

View File

@ -4,7 +4,7 @@ function SuperNode(options) {
var now = Date.now();
this.id = Constants.SUPER_NODE_ID;
this.type = Constants.NODE_TYPE_META;
this.mode = Constants.MODE_META;
this.atime = options.atime || now;
this.ctime = options.ctime || now;
this.mtime = options.mtime || now;

View File

@ -1,81 +0,0 @@
var path = require('path');
var utils = require('./utils');
const PLUGIN_NAME = 'filer-webpack-plugin';
const OPTIONS_SCHEMA = require('./schema');
const OPTIONS_PROCESSORS = require('./processors');
module.exports = class FilerWebpackPlugin {
constructor(options = {}) {
utils.validateOptions(options, OPTIONS_SCHEMA);
this.options = utils.processOptions(options, OPTIONS_PROCESSORS);
}
apply(compiler) {
compiler.hooks.normalModuleFactory.tap(
PLUGIN_NAME,
(factory) => {
factory.hooks.resolve.tap(
PLUGIN_NAME,
(resolveData) => {
// Resolve fsProvider if required
if (
resolveData.request === 'fsProvider'
&& resolveData.context === this.options.shimsDir
) {
return this.resolveFsProvider(resolveData);
}
// Ignore filer files (these should resolve modules normally)
if (resolveData.context.startsWith(this.options.filerDir)) return;
// Apply fs, path and buffer shims if required
switch (resolveData.request) {
case 'fs':
if (!this.options.shimFs) return;
return this.applyFsShim(resolveData);
case 'path':
if (!this.options.shimPath) return;
return this.applyPathShim(resolveData);
default:
return;
}
}
);
},
);
}
resolveFsProvider(resolveData) {
switch (this.options.fsProvider) {
case 'default':
resolveData.request = path.join(this.options.fsProviderDir, 'default.js');
break;
case 'indexeddb':
resolveData.request = path.join(this.options.fsProviderDir, 'indexeddb.js');
break;
case 'memory':
resolveData.request = path.join(this.options.fsProviderDir, 'memory.js');
break;
case 'custom':
resolveData.request = path.join(this.options.fsProviderDir, 'custom.js');
break;
default:
throw new Error([
'Invalid option for fsProvider.',
'fsProvider must be one of \'default\', \'indexeddb\', \'memory\' or \'custom\'.',
'If using a custom fsProvider, you must also provide the fsProviderDir option.'
].join(' '));
}
}
applyFsShim(resolveData) {
resolveData.request = path.join(this.options.shimsDir, 'fs.js');
}
applyPathShim(resolveData) {
resolveData.request = path.join(this.options.shimsDir, 'path.js');
}
};
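
Since the plugin above is driven entirely by the options validated against the schema and processors below, a hypothetical `webpack.config.js` might look like the following. The require path is a placeholder (not taken from this diff); only the option names and the `fsProvider` values come from the plugin code:

```
// webpack.config.js -- sketch only; adjust the require path to wherever
// FilerWebpackPlugin is exported from in your setup.
const FilerWebpackPlugin = require('./plugins/filer-webpack-plugin');

module.exports = {
  entry: './src/index.js',
  plugins: [
    new FilerWebpackPlugin({
      shimFs: true,           // rewrite require('fs') to the Filer fs shim
      shimPath: true,         // rewrite require('path') to the Filer path shim
      fsProvider: 'default',  // 'default' | 'indexeddb' | 'memory' | 'custom'
      // filerDir, shimsDir and fsProviderDir are also accepted and may use
      // a '<rootDir>' prefix, which the processors resolve against process.cwd().
    }),
  ],
};
```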

View File

@ -1,34 +0,0 @@
var path = require('path');
const ROOT_DIR_TAG = '<rootDir>';
const CWD = process.cwd();
module.exports = {
filerDir: {
process: function(value) {
if (!value) {
return path.join(CWD, 'node_modules', 'filer');
}
return path.resolve(value.replace(ROOT_DIR_TAG, CWD));
},
},
shimsDir: {
process: function(value) {
if (!value) {
return path.join(CWD, 'node_modules', 'filer', 'shims');
}
return path.resolve(value.replace(ROOT_DIR_TAG, CWD));
}
},
fsProviderDir: {
process: function(value) {
if (!value) {
return path.join(CWD, 'node_modules', 'filer', 'shims', 'providers');
}
return path.resolve(value.replace(ROOT_DIR_TAG, CWD));
},
},
shimFs: { default: true },
shimPath: { default: true},
fsProvider: { default: 'default'},
};

View File

@ -1,23 +0,0 @@
module.exports = {
type: 'object',
properties: {
filerDir: {
type: 'string',
},
shimsDir: {
type: 'string',
},
shimFs: {
type: 'boolean',
},
shimPath: {
type: 'boolean',
},
fsProvider: {
type: 'string',
},
fsProviderDir: {
type: 'string',
},
}
};

View File

@ -1,26 +0,0 @@
var { validate } = require('schema-utils');
function validateOptions(options, schema) {
validate(schema, options);
}
function processOptions(options, processors) {
const processedOptions = {};
for (const [property, processor] of Object.entries(processors)) {
processedOptions[property] = options[property];
if (processedOptions[property] === undefined) {
processedOptions[property] = processor.default;
}
if (processor.process) {
processedOptions[property] = processor.process(processedOptions[property]);
}
}
return processedOptions;
}
module.exports = {
validateOptions,
processOptions,
};

View File

@ -1,3 +1,4 @@
var Filer = require('../..');
var util = require('../lib/test-utils.js');
var expect = require('chai').expect;

View File

@ -1,3 +1,4 @@
var Filer = require('../..');
var util = require('../lib/test-utils.js');
var expect = require('chai').expect;

tests/bugs/issue225.js Normal file
View File

@ -0,0 +1,44 @@
/**
* https://github.com/js-platform/filer/pull/225
*
* NOTE: this test has to be run outside the browserify step,
* since combining require for node.js/browserify builds with
* r.js doesn't work.
*/
var requirejs = require('requirejs');
var expect = require('chai').expect;
// browser-request assumes access to XHR
GLOBAL.XMLHttpRequest = {};
describe('require.js should be able to use built Filer, issue 225', function() {
it('should properly load Filer as an AMD module, with Buffer included', function(done) {
requirejs.config({
baseUrl: __dirname,
paths: {
"filer": "../../dist/filer-issue225"
},
nodeRequire: require
});
requirejs(["filer"], function(Filer) {
expect(Filer).to.exist;
expect(Filer.Buffer).to.exist;
var fs = new Filer.FileSystem({provider: new Filer.FileSystem.providers.Memory()});
var buf = new Filer.Buffer([1, 2, 3]);
fs.writeFile('/file', buf, function(err) {
expect(err).not.to.exist;
fs.readFile('/file', function(err, data) {
expect(err).not.to.exist;
expect(data).to.deep.equal(buf);
done();
});
});
});
});
});

View File

@ -1,3 +1,4 @@
var Filer = require('../..');
var util = require('../lib/test-utils.js');
var expect = require('chai').expect;

View File

@ -1,3 +1,4 @@
var Filer = require('../..');
var util = require('../lib/test-utils.js');
var expect = require('chai').expect;

View File

@ -1,4 +1,4 @@
var Filer = require('../../src');
var Filer = require('../..');
var util = require('../lib/test-utils.js');
var expect = require('chai').expect;
@ -7,7 +7,7 @@ describe('Filer.Buffer should accept initialized ArrayBuffers, issue 249', funct
afterEach(util.cleanup);
it('should accept an ArrayBuffer with a specified size', function(done) {
var buffer = Buffer.from(new ArrayBuffer(5));
var buffer = new Filer.Buffer(new ArrayBuffer(5));
expect(buffer.length).to.equal(5);
done();
});
@ -18,7 +18,7 @@ describe('Filer.Buffer static methods are in tact, issue 249', function() {
afterEach(util.cleanup);
it('should proxy Buffer.isBuffer', function(done) {
expect(Filer.Buffer.isBuffer(Buffer.from([]))).to.equal(true);
expect(Filer.Buffer.isBuffer(new Filer.Buffer([]))).to.equal(true);
expect(Filer.Buffer.isBuffer('')).to.equal(false);
done();
});
@ -35,7 +35,7 @@ describe('Filer.Buffer static methods are in tact, issue 249', function() {
});
it('should proxy Buffer.concat', function(done) {
expect(Filer.Buffer.concat([Buffer.alloc(1), Buffer.alloc(2)]).length).to.equal(3);
expect(Filer.Buffer.concat([new Filer.Buffer(1), new Filer.Buffer(2)]).length).to.equal(3);
done();
});
});

View File

@ -1,3 +1,4 @@
var Filer = require('../..');
var util = require('../lib/test-utils.js');
var expect = require('chai').expect;

View File

@ -1,4 +1,5 @@
var Filer = require('../../src');
var Filer = require('../..');
var util = require('../lib/test-utils.js');
var expect = require('chai').expect;
var setImmediate = require('../../lib/async.js').setImmediate;
@ -52,7 +53,7 @@ describe('Queued operations should error when fs is in error state, issue 258',
it('should get EFILESYSTEMERROR errors on callbacks to queued operations on provider error', function(done) {
var errCount = 0;
var fs = new Filer.FileSystem({provider: provider}, function() {
var fs = new Filer.FileSystem({provider: provider}, function(err) {
// Do nothing
});

View File

@ -1,3 +1,4 @@
var Filer = require('../..');
var util = require('../lib/test-utils.js');
var expect = require('chai').expect;
@ -14,7 +15,6 @@ describe('fs.readdir on non-dir paths, issue 267', function() {
fs.readdir('/myfile.txt', function(err, contents) {
expect(err).to.exist;
expect(err.code).to.equal('ENOTDIR');
expect(contents).not.to.exist;
done();
});
});

View File

@ -1,3 +1,4 @@
var Filer = require('../..');
var util = require('../lib/test-utils.js');
var expect = require('chai').expect;
@ -5,15 +6,23 @@ describe('undefined and relative paths, issue270', function() {
beforeEach(util.setup);
afterEach(util.cleanup);
it('should fail with EINVAL when called on an undefined path', function() {
it('should fail with EINVAL when called on an undefined path', function(done) {
var fs = util.fs();
var fn = () => fs.writeFile(undefined, 'data');
expect(fn).to.throw();
fs.writeFile(undefined, 'data', function(err) {
expect(err).to.exist;
expect(err.code).to.equal('EINVAL');
done();
});
});
it('should fail with EINVAL when called on a relative path', function() {
it('should fail with EINVAL when called on a relative path', function(done) {
var fs = util.fs();
var fn = () => fs.writeFile('relpath/file.txt', 'data');
expect(fn).to.throw();
fs.writeFile('relpath/file.txt', 'data', function(err) {
expect(err).to.exist;
expect(err.code).to.equal('EINVAL');
done();
});
});
});

View File

@ -1,12 +1,12 @@
var Path = require('../../src').Path;
var Path = require('../..').Path;
var expect = require('chai').expect;
describe('Path.resolve does not work, issue357', function() {
it('Path.relative() should not crash', function() {
expect(Path.relative('/mydir', '/mydir/file')).to.equal('file');
expect(Path.relative("/mydir", "/mydir/file")).to.equal("file");
// https://nodejs.org/api/path.html#path_path_relative_from_to
expect(Path.relative('/data/orandea/test/aaa', '/data/orandea/impl/bbb')).to.equal('../../impl/bbb');
expect(Path.relative("/data/orandea/test/aaa", "/data/orandea/impl/bbb")).to.equal("../../impl/bbb");
});
it('Path.resolve() should work as expected', function() {

View File

@ -1,12 +0,0 @@
'use strict';
const util = require('../lib/test-utils.js');
describe('unexpected failures when calling fs functions (e.g. writeFile) with empty options object, issue 773', function() {
beforeEach(util.setup);
afterEach(util.cleanup);
it('should call fs.writeFile with an empty options object', function(done) {
const fs = util.fs();
fs.writeFile('/a', 'trololol', {}, done);
});
});

View File

@ -1,115 +0,0 @@
'use strict';
var util = require('../lib/test-utils.js');
var expect = require('chai').expect;
describe('fs.readdir fails when passing options, issue775', function () {
beforeEach(util.setup);
afterEach(util.cleanup);
function setup(fs, dir, cb) {
fs.mkdir(dir, undefined, (err) => {
if (err) {
cb(err);
}
else {
fs.writeFile(dir + '/file', '', (err) => {
if (err) {
cb(err);
}
else {
fs.mkdir(dir + '/folder', (err) => {
if (err) {
cb(err);
}
else {
fs.symlink(dir + '/file', dir + '/symlink', (err) => {
if (err) {
cb(err);
}
else {
cb();
}
});
}
});
}
});
}
});
}
it('should create a directory, add a file, folder and symbolic link then call fs.readdir with buffer encoding', function (done) {
var fs = util.fs();
setup(fs, '/test_dir', (err) => {
if (err) {
done(err);
}
fs.readdir('/test_dir', 'buffer', (err, data) => {
if (err) {
done(err);
}
else {
expect(data).to.have.length(3);
expect(data[0].toString()).to.equal('file');
expect(data[1].toString()).to.equal('folder');
expect(data[2].toString()).to.equal('symlink');
done();
}
});
});
});
it('should create a directory, add a file, folder and symbolic link then call fs.readdir with withFileTypes and encoding options', function (done) {
var fs = util.fs();
setup(fs, '/test_dir', (err) => {
if (err) {
done(err);
}
fs.readdir('/test_dir', { encoding: 'base64', withFileTypes: true }, (err, data) => {
if (err) {
done(err);
}
else {
expect(data).to.have.length(3);
expect(Buffer.from(data[0].name, 'base64').toString()).to.equal('file');
expect(Buffer.from(data[1].name, 'base64').toString()).to.equal('folder');
expect(Buffer.from(data[2].name, 'base64').toString()).to.equal('symlink');
expect(data[0].isFile()).to.be.true;
expect(data[1].isDirectory()).to.be.true;
expect(data[2].isSymbolicLink()).to.be.true;
done();
}
});
});
});
it('should create a directory then call fs.readdir without options', function (done) {
var fs = util.fs();
setup(fs, '/test_dir', (err) => {
if (err) {
done(err);
}
else {
fs.readdir('/test_dir', (err, data) => {
if (err) {
done(err);
}
else {
expect(data).to.have.length(3);
expect(data[0]).to.equal('file');
expect(data[1]).to.equal('folder');
expect(data[2]).to.equal('symlink');
done();
}
});
}
});
});
});

View File

@ -1,12 +0,0 @@
'use strict';
const util = require('../lib/test-utils.js');
describe('fs.mkdir does not recursively create parent directories when called with { recursive: true }, issue776', function() {
beforeEach(util.setup);
afterEach(util.cleanup);
it.skip('should not throw when calling fs.mkdir with recursive flag set', function(done) {
const fs = util.fs();
fs.mkdir('/test_dir/a/b', { recursive: true }, done);
});
});

View File

@ -1,4 +1,4 @@
var Filer = require('../../src');
var Filer = require('../..');
var util = require('../lib/test-utils.js');
var expect = require('chai').expect;
var async = require('../../lib/async.js');
@ -46,7 +46,7 @@ describe('sh.ls and deep directory trees', function() {
}
async.eachSeries(paths, writeFile, function(err) {
if(err) throw err;
if(err) { console.log('error', err); throw err; }
sh.ls('/', {recursive: true}, function(err, listing) {
expect(err).not.to.exist;

View File

@ -1,3 +1,4 @@
var Filer = require('../..');
var util = require('../lib/test-utils.js');
var expect = require('chai').expect;

View File

@ -1,9 +0,0 @@
The following images are JSON filesystem images created with
`tools/fs-image.js`, and named after the filesystem dir used
as the source of the image and the version of Filer used to
generate them:
1. `tiny-fs.0.43.json` was created from `tests/filesystem/tiny-fs/` with https://github.com/filerjs/filer/blob/d66114e20c7f0235698d9933dbd90217ba86fa4e/dist/filer.min.js
If you need to create a new image, use `tools/get-filer-version.js` to
get a specific version of Filer, then `tools/fs-image.js` to generate an image.

View File

@ -1 +0,0 @@
{"00000000-0000-0000-0000-000000000000":{"id":"00000000-0000-0000-0000-000000000000","mode":"META","atime":1545098425757,"ctime":1545098425757,"mtime":1545098425757,"rnode":"A63180BA-9BCE-4245-86E8-8613726587CA"},"A63180BA-9BCE-4245-86E8-8613726587CA":{"id":"A63180BA-9BCE-4245-86E8-8613726587CA","mode":"DIRECTORY","size":0,"atime":1545098425770,"ctime":1545098425770,"mtime":1545098425770,"flags":[],"xattrs":{},"nlinks":1,"version":0,"nblocks":1,"data":"3A41160B-1F39-4162-85BD-473F16ABF35F"},"3A41160B-1F39-4162-85BD-473F16ABF35F":{"dir":{"id":"E1634C88-337D-4D1E-9155-81B099BC7E7C","type":"DIRECTORY"},"file.txt":{"id":"DD338D5C-34FF-4AF7-AFBB-C01E8214125F","type":"FILE"}},"E1634C88-337D-4D1E-9155-81B099BC7E7C":{"id":"E1634C88-337D-4D1E-9155-81B099BC7E7C","mode":"DIRECTORY","size":0,"atime":1545098425778,"ctime":1545098425778,"mtime":1545098425778,"flags":[],"xattrs":{},"nlinks":1,"version":0,"nblocks":1,"data":"C8BB5894-9B0C-4CDC-B014-3944FB40DA77"},"C8BB5894-9B0C-4CDC-B014-3944FB40DA77":{"file2.txt":{"id":"EEB56FEC-DABE-48BF-88A5-B9F37723A0BC","type":"FILE"}},"DD338D5C-34FF-4AF7-AFBB-C01E8214125F":{"id":"DD338D5C-34FF-4AF7-AFBB-C01E8214125F","mode":"FILE","size":16,"atime":1545098327877.7542,"ctime":1545098313562.6824,"mtime":1545098313562.6824,"flags":[],"xattrs":{},"nlinks":1,"version":1,"nblocks":1,"data":"BDEE2339-6992-4D8F-AFAA-81813C30592C"},"BDEE2339-6992-4D8F-AFAA-81813C30592C":{"type":"Buffer","data":[84,104,105,115,32,105,115,32,97,32,102,105,108,101,46,10]},"EEB56FEC-DABE-48BF-88A5-B9F37723A0BC":{"id":"EEB56FEC-DABE-48BF-88A5-B9F37723A0BC","mode":"FILE","size":23,"atime":1545098327877.7542,"ctime":1545098313562.6824,"mtime":1545098313562.6824,"flags":[],"xattrs":{},"nlinks":1,"version":1,"nblocks":1,"data":"62262B53-32BC-43F6-983B-4127302C2483"},"62262B53-32BC-43F6-983B-4127302C2483":{"type":"Buffer","data":[84,104,105,115,32,105,115,32,97,32,115,101,99,111,110,100,32,102,105,108,101,46,10]}}

View File

@ -1,91 +0,0 @@
const expect = require('chai').expect;
const Filer = require('../../../src');
const SerializableMemoryProvider = require('../../lib/serializable-memory-provider');
const nodeFs = require('fs');
const nodePath = require('path');
describe('Migration tests from Filer 0.43 to current', () => {
let filerFs;
before(done => {
// Let the provider parse the JSON
const imagePath = nodePath.resolve(__dirname, '../images/tiny-fs.0.43.json');
nodeFs.readFile(imagePath, 'utf8', (err, data) => {
if(err) throw err;
new Filer.FileSystem({
provider: new SerializableMemoryProvider('0.43', data)
}, (err, fs) => {
if(err) throw err;
filerFs = fs;
done();
});
});
});
it('should have a root directory', done => {
filerFs.stat('/', (err, stats) => {
if(err) throw err;
expect(stats).to.be.an('object');
expect(stats.isDirectory()).to.be.true;
done();
});
});
it('should have expected entries in root dir', done => {
filerFs.readdir('/', (err, entries) => {
if(err) throw err;
expect(entries).to.be.an('array');
expect(entries.length).to.equal(2);
expect(entries).to.contain('file.txt');
expect(entries).to.contain('dir');
done();
});
});
it('should have correct contents for /file.txt (read as String)', done => {
const fileTxtPath = nodePath.resolve(__dirname, '../tiny-fs/file.txt');
nodeFs.readFile(fileTxtPath, 'utf8', (err, nodeData) => {
if(err) throw err;
filerFs.readFile('/file.txt', 'utf8', (err, filerData) => {
if(err) throw err;
expect(nodeData).to.equal(filerData);
done();
});
});
});
it('should have expected entries in /dir', done => {
filerFs.readdir('/dir', (err, entries) => {
if(err) throw err;
expect(entries).to.be.an('array');
expect(entries.length).to.equal(1);
expect(entries).to.contain('file2.txt');
done();
});
});
it('should have correct contents for /dir/file2.txt (read as Buffer)', done => {
const file2TxtPath = nodePath.resolve(__dirname, '../tiny-fs/dir/file2.txt');
nodeFs.readFile(file2TxtPath, null, (err, nodeData) => {
if(err) throw err;
filerFs.readFile('/dir/file2.txt', null, (err, filerData) => {
if(err) throw err;
expect(nodeData).to.deep.equal(filerData);
done();
});
});
});
});

View File

@ -1 +0,0 @@
This is a second file.

View File

@ -1 +0,0 @@
This is a file.

View File

@ -1,26 +1,24 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Filer Tests</title>
<title>Mocha Tests</title>
<link rel="stylesheet" href="../node_modules/mocha/mocha.css" />
</head>
<body>
<div id="mocha"></div>
<script src="../node_modules/chai/chai.js"></script>
<script src="../node_modules/mocha/mocha.js"></script>
<link rel="stylesheet" href="//cdnjs.cloudflare.com/ajax/libs/mocha/1.21.4/mocha.css" />
<script src="//cdnjs.cloudflare.com/ajax/libs/mocha/1.21.4/mocha.js"></script>
<script>window.mocha || document.write('<script src="../node_modules/mocha/mocha.js"><\/script>')</script>
<script src="//cdnjs.cloudflare.com/ajax/libs/chai/1.9.2/chai.min.js"></script>
<script>window.chai || document.write('<script src="../node_modules/chai/chai.js"><\/script>')</script>
<script>
mocha.setup('bdd').timeout(10000).slow(250);
mocha.setup('bdd').timeout(5000).slow(250);;
window.onload = function() {
mocha.checkLeaks();
mocha.run();
};
</script>
<!-- Add any new tests to `tests/index.js` -->
<script src="./index.js"></script>
<script src="../dist/filer-test.js"></script>
</head>
<body>
<div id="mocha"></div>
</body>
</html>

View File

@ -3,97 +3,77 @@
* get them running by default.
*/
// Shims
require('./spec/shims/fs.spec');
require('./spec/shims/path.spec');
// Webpack Plugin
require('./spec/webpack-plugin/webpack-plugin.spec');
// Filer
require('./spec/filer.spec');
require('./spec/filer.buffer.spec.js');
require("./spec/filer.spec");
// Filer.FileSystem.*
require('./spec/filer.filesystem.spec');
require('./spec/fs.spec');
require('./spec/fs.access.spec');
require('./spec/fs.stat.spec');
require('./spec/fs.lstat.spec');
require('./spec/fs.exists.spec');
require('./spec/fs.mknod.spec');
require('./spec/fs.mkdir.spec');
require('./spec/fs.mkdtemp.spec');
require('./spec/fs.readdir.spec');
require('./spec/fs.rmdir.spec');
require('./spec/fs.open.spec');
require('./spec/fs.write.spec');
require('./spec/fs.writeFile-readFile.spec');
require('./spec/fs.appendFile.spec');
require('./spec/fs.read.spec');
require('./spec/fs.close.spec');
require('./spec/fs.fsync.spec');
require('./spec/fs.link.spec');
require('./spec/fs.unlink.spec');
require('./spec/fs.rename.spec');
require('./spec/fs.lseek.spec');
require('./spec/fs.symlink.spec');
require('./spec/fs.readlink.spec');
require('./spec/fs.truncate.spec');
require('./spec/fs.ftruncate.spec');
require('./spec/fs.utimes.spec');
require('./spec/fs.xattr.spec');
require('./spec/path-resolution.spec');
require('./spec/trailing-slashes.spec');
require('./spec/times.spec');
require('./spec/time-flags.spec');
require('./spec/fs.watch.spec');
require('./spec/fs.unwatchFile.spec');
require('./spec/errors.spec');
require('./spec/fs.shell.spec');
require('./spec/fs.chmod.spec');
require('./spec/fs.chown.spec');
require('./spec/fs.copyFile.spec');
require("./spec/filer.filesystem.spec");
require("./spec/fs.spec");
require("./spec/fs.stat.spec");
require("./spec/fs.lstat.spec");
require("./spec/fs.exists.spec");
require("./spec/fs.mknod.spec");
require("./spec/fs.mkdir.spec");
require("./spec/fs.readdir.spec");
require("./spec/fs.rmdir.spec");
require("./spec/fs.open.spec");
require("./spec/fs.write.spec");
require("./spec/fs.writeFile-readFile.spec");
require("./spec/fs.appendFile.spec");
require("./spec/fs.read.spec");
require("./spec/fs.close.spec");
require("./spec/fs.link.spec");
require("./spec/fs.unlink.spec");
require("./spec/fs.rename.spec");
require("./spec/fs.lseek.spec");
require("./spec/fs.symlink.spec");
require("./spec/fs.readlink.spec");
require("./spec/fs.truncate.spec");
require("./spec/fs.utimes.spec");
require("./spec/fs.xattr.spec");
require("./spec/fs.stats.spec");
require("./spec/path-resolution.spec");
require("./spec/trailing-slashes.spec");
require("./spec/times.spec");
require("./spec/time-flags.spec");
require("./spec/fs.watch.spec");
require("./spec/errors.spec");
require("./spec/fs.shell.spec");
// Filer.FileSystem.providers.*
require('./spec/providers/providers.spec');
require('./spec/providers/providers.indexeddb.spec');
require('./spec/providers/providers.memory.spec');
require('./spec/providers/serializable-memory-provider.spec');
require("./spec/providers/providers.spec");
require("./spec/providers/providers.indexeddb.spec");
require("./spec/providers/providers.websql.spec");
require("./spec/providers/providers.memory.spec");
// Filer.FileSystemShell.*
require('./spec/shell/cd.spec');
require('./spec/shell/touch.spec');
require('./spec/shell/exec.spec');
require('./spec/shell/cat.spec');
require('./spec/shell/ls.spec');
require('./spec/shell/rm.spec');
require('./spec/shell/env.spec');
require('./spec/shell/mkdirp.spec');
require('./spec/shell/find.spec');
require("./spec/shell/cd.spec");
require("./spec/shell/touch.spec");
require("./spec/shell/exec.spec");
require("./spec/shell/cat.spec");
require("./spec/shell/ls.spec");
require("./spec/shell/rm.spec");
require("./spec/shell/env.spec");
require("./spec/shell/mkdirp.spec");
require("./spec/shell/find.spec");
// Ported node.js tests (filenames match names in https://github.com/joyent/node/tree/master/test)
require('./spec/node-js/simple/test-fs-mkdir');
require('./spec/node-js/simple/test-fs-null-bytes');
require('./spec/node-js/simple/test-fs-watch');
require('./spec/node-js/simple/test-fs-watch-recursive');
require("./spec/node-js/simple/test-fs-mkdir");
require("./spec/node-js/simple/test-fs-null-bytes");
require("./spec/node-js/simple/test-fs-watch");
require("./spec/node-js/simple/test-fs-watch-recursive");
// Regressions, Bugs
require('./bugs/issue105');
require('./bugs/issue106');
require('./bugs/issue239');
require('./bugs/issue249');
require('./bugs/ls-depth-bug');
require('./bugs/issue247.js');
require('./bugs/issue254.js');
require('./bugs/issue258.js');
require('./bugs/issue267.js');
require('./bugs/issue270.js');
require('./bugs/rename-dir-trailing-slash.js');
require('./bugs/issue357.js');
require('./bugs/issue773.js');
require('./bugs/issue775.js');
require('./bugs/issue776.js');
// Sample code from README
require('./spec/readme.example.spec');
// NOTE: bugs/issue225.js has to be run outside this step, see gruntfile.js
require("./bugs/issue105");
require("./bugs/issue106");
require("./bugs/issue239");
require("./bugs/issue249");
require("./bugs/ls-depth-bug");
require("./bugs/issue247.js");
require("./bugs/issue254.js");
require("./bugs/issue258.js");
require("./bugs/issue267.js");
require("./bugs/issue270.js");
require("./bugs/rename-dir-trailing-slash.js");
require("./bugs/issue357.js");

View File

@ -1,8 +1,11 @@
'use strict';
var Filer = require("../..");
const Filer = require('../../src');
var indexedDB = global.indexedDB ||
global.mozIndexedDB ||
global.webkitIndexedDB ||
global.msIndexedDB;
let needsCleanup = [];
var needsCleanup = [];
if(global.addEventListener) {
global.addEventListener('beforeunload', function() {
needsCleanup.forEach(function(f) { f(); });
@ -10,8 +13,8 @@ if(global.addEventListener) {
}
function IndexedDBTestProvider(name) {
let _done = false;
let that = this;
var _done = false;
var that = this;
function cleanup(callback) {
callback = callback || function(){};
@ -33,17 +36,11 @@ function IndexedDBTestProvider(name) {
that.provider.db.close();
}
const indexedDB = global.indexedDB ||
global.mozIndexedDB ||
global.webkitIndexedDB ||
global.msIndexedDB;
let request = indexedDB.deleteDatabase(name);
var request = indexedDB.deleteDatabase(name);
request.onsuccess = finished;
request.onerror = finished;
} catch(e) {
/* eslint no-console:0 */
console.log('Failed to delete test database', e);
console.log("Failed to delete test database", e);
finished();
}
}

View File

@ -1,4 +1,4 @@
var Filer = require('../../src');
var Filer = require('../..');
function MemoryTestProvider(name) {
var that = this;

View File

@ -1,42 +0,0 @@
const MemoryProvider = require('../../src/providers/memory');
const { parseBJSON } = require('../lib/test-utils');
class SerializableMemoryProvider extends MemoryProvider {
constructor(name, jsonImage) {
super(name);
this.unparsedJSONImage = jsonImage;
}
/**
* In addition to the usual setup of a Memory provider,
* also parse and overwrite the internal database.
*/
open(callback) {
super.open(err => {
if(err) {
return callback(err);
}
// If we don't have an image to import, leave db as is
if(!this.unparsedJSONImage) {
return callback();
}
// Try to import the fs image from JSON
try {
this.db = parseBJSON(this.unparsedJSONImage);
this.unparsedJSONImage = null;
callback();
} catch(e) {
callback(new Error(`unable to parse JSON filesystem image: ${e.message}`));
}
});
}
export() {
return JSON.stringify(this.db);
}
}
module.exports = SerializableMemoryProvider;
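
This test provider is what lets the migration spec earlier in this diff mount a JSON filesystem image and compare it against files on disk. A sketch of the same flow outside Mocha, assuming the directory layout and require paths used by that spec:

```
const Filer = require('../../../src');
const SerializableMemoryProvider = require('../../lib/serializable-memory-provider');
const nodeFs = require('fs');
const nodePath = require('path');

// Load a JSON image produced by tools/fs-image.js and mount it.
const imagePath = nodePath.resolve(__dirname, '../images/tiny-fs.0.43.json');
const json = nodeFs.readFileSync(imagePath, 'utf8');

const provider = new SerializableMemoryProvider('migration-demo', json);
const fs = new Filer.FileSystem({ provider }, err => {
  if (err) throw err;

  fs.readdir('/', (err, entries) => {
    if (err) throw err;
    console.log(entries);           // ['dir', 'file.txt'] for the tiny-fs image
    console.log(provider.export()); // re-serialize the current database as JSON
  });
});
```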

View File

@ -1,5 +1,6 @@
var Filer = require('../../src');
var Filer = require('../..');
var IndexedDBTestProvider = require('./indexeddb.js');
var WebSQLTestProvider = require('./websql.js');
var MemoryTestProvider = require('./memory.js');
var Url = require('url');
@ -15,8 +16,13 @@ function uniqueName() {
function findBestProvider() {
var providers = Filer.FileSystem.providers;
return providers.IndexedDB.isSupported() ?
IndexedDBTestProvider : MemoryTestProvider;
if(providers.IndexedDB.isSupported()) {
return IndexedDBTestProvider;
}
if(providers.WebSQL.isSupported()) {
return WebSQLTestProvider;
}
return MemoryTestProvider;
}
function getUrlParams() {
@ -43,24 +49,6 @@ function getProviderType() {
return queryString['filer-provider'] || defaultProvider;
}
// Run fn() in an environment with indexedDB available
// either as-is, or shimmed, removing when done.
function shimIndexedDB(fn) {
var addShim = !Filer.FileSystem.providers.IndexedDB.isSupported();
if(addShim) {
global.indexedDB = require('fake-indexeddb');
}
var result = fn();
if(addShim) {
delete global.indexedDB;
}
return result;
}
function setup(callback) {
// In browser we support specifying the provider via the query string
// (e.g., ?filer-provider=IndexedDB). If not specified, we use
@ -73,6 +61,9 @@ function setup(callback) {
case 'indexeddb':
_provider = new IndexedDBTestProvider(name);
break;
case 'websql':
_provider = new WebSQLTestProvider(name);
break;
case 'memory':
_provider = new MemoryTestProvider(name);
break;
@ -104,14 +95,14 @@ function setup(callback) {
function fs() {
if(!_fs) {
throw new Error('TestUtil: call setup() before fs()');
throw "TestUtil: call setup() before fs()";
}
return _fs;
}
function provider() {
if(!_provider) {
throw new Error('TestUtil: call setup() before provider()');
throw "TestUtil: call setup() before provider()";
}
return _provider;
}
@ -149,34 +140,6 @@ function typedArrayEqual(a, b) {
return true;
}
/**
* Parse JSON with serialized Buffers
*/
const parseBJSON = json =>
JSON.parse(json, (key, value) =>
value && value.type === 'Buffer' ?
Buffer.from(value.data) :
value
);
function createMockFn(implementation = undefined) {
const calls = [];
const mockFn = function(...args) {
calls.push({
args,
});
if (typeof implementation === 'function') {
return implementation(...args);
}
};
Object.defineProperty(mockFn, 'calls', {
get() {
return calls;
}
});
return mockFn;
}
module.exports = {
uniqueName: uniqueName,
setup: setup,
@ -185,11 +148,9 @@ module.exports = {
provider: provider,
providers: {
IndexedDB: IndexedDBTestProvider,
WebSQL: WebSQLTestProvider,
Memory: MemoryTestProvider
},
cleanup: cleanup,
typedArrayEqual: typedArrayEqual,
parseBJSON,
shimIndexedDB,
createMockFn
typedArrayEqual: typedArrayEqual
};
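
Two of the master-side helpers removed in this hunk are easy to miss: `shimIndexedDB(fn)` temporarily installs `fake-indexeddb` when the real API is absent, and `createMockFn(impl)` is a tiny jest-style mock that records call arguments. A usage sketch from a spec file, assuming the master-side exports shown above:

```
const util = require('../lib/test-utils.js');

// Record calls while still delegating to a real implementation.
const add = util.createMockFn((a, b) => a + b);
add(1, 2);
add(3, 4);
console.log(add.calls.length);  // 2
console.log(add.calls[0].args); // [1, 2]

// Run a block with indexedDB available (shimmed if the environment lacks it).
util.shimIndexedDB(() => {
  console.log(typeof indexedDB !== 'undefined'); // true inside the callback
});
```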

tests/lib/websql.js Normal file
View File

@ -0,0 +1,49 @@
var Filer = require('../..');
var needsCleanup = [];
if(global.addEventListener) {
global.addEventListener('beforeunload', function() {
needsCleanup.forEach(function(f) { f(); });
});
}
function WebSQLTestProvider(name) {
var _done = false;
var that = this;
function cleanup(callback) {
callback = callback || function(){};
if(!that.provider || _done) {
return callback();
}
// Provider is there, but db was never touched
if(!that.provider.db) {
return callback();
}
var context = that.provider.getReadWriteContext();
context.clear(function() {
that.provider = null;
_done = true;
callback();
});
}
function init() {
if(that.provider) {
return;
}
that.provider = new Filer.FileSystem.providers.WebSQL(name);
needsCleanup.push(cleanup);
}
this.init = init;
this.cleanup = cleanup;
}
WebSQLTestProvider.isSupported = function() {
return Filer.FileSystem.providers.WebSQL.isSupported();
};
module.exports = WebSQLTestProvider;

View File

@ -1,10 +1,8 @@
'use strict';
var Filer = require('../..');
var expect = require('chai').expect;
const Filer = require('../../src');
const expect = require('chai').expect;
describe('Filer.Errors', function() {
it('has expected errors', function() {
describe("Filer.Errors", function() {
it("has expected errors", function() {
expect(Filer.Errors).to.exist;
// By ctor -- if you add some to src/errors.js, also add here
@ -137,7 +135,7 @@ describe('Filer.Errors', function() {
});
it('should include all expected properties by default', function() {
const err = new Filer.Errors.ENOENT();
var err = new Filer.Errors.ENOENT();
expect(err.name).to.equal('ENOENT');
expect(err.code).to.equal('ENOENT');
expect(err.errno).to.equal(34);
@ -145,7 +143,7 @@ describe('Filer.Errors', function() {
});
it('should include extra properties when provided', function() {
const err = new Filer.Errors.ENOENT('This is the message', '/this/is/the/path');
var err = new Filer.Errors.ENOENT('This is the message', '/this/is/the/path');
expect(err.name).to.equal('ENOENT');
expect(err.code).to.equal('ENOENT');
expect(err.errno).to.equal(34);
@ -154,29 +152,29 @@ describe('Filer.Errors', function() {
});
it('should include default message and path info when provided', function() {
const err = new Filer.Errors.ENOENT(null, '/this/is/the/path');
var err = new Filer.Errors.ENOENT(null, '/this/is/the/path');
expect(err.message).to.equal('no such file or directory');
expect(err.path).to.equal('/this/is/the/path');
});
it('should include just the message when no path provided', function() {
const err = new Filer.Errors.ENOENT();
var err = new Filer.Errors.ENOENT();
expect(err.message).to.equal('no such file or directory');
expect(err.path).not.to.exist;
});
it('should not include path in toString() when not provided', function() {
const err = new Filer.Errors.ENOENT('This is the message');
expect(err.toString()).to.equal('ENOENT: This is the message');
var err = new Filer.Errors.ENOENT('This is the message');
expect(err.toString()).to.equal("ENOENT: This is the message");
});
it('should include path in toString() when provided', function() {
const err = new Filer.Errors.ENOENT(null, '/this/is/the/path');
expect(err.toString()).to.equal('ENOENT: no such file or directory, \'/this/is/the/path\'');
var err = new Filer.Errors.ENOENT(null, '/this/is/the/path');
expect(err.toString()).to.equal("ENOENT: no such file or directory, '/this/is/the/path'");
});
it('should include message and path info when provided', function() {
const err = new Filer.Errors.ENOENT('This is the message', '/this/is/the/path');
var err = new Filer.Errors.ENOENT('This is the message', '/this/is/the/path');
expect(err.message).to.equal('This is the message');
expect(err.path).to.equal('/this/is/the/path');
});

View File

@ -1,51 +0,0 @@
'use strict';
const Filer = require('../../src');
const expect = require('chai').expect;
describe('Filer.Buffer', function() {
it('should support .from()', function() {
expect(Filer.Buffer.from).to.be.a('function');
});
it('should support .alloc()', function() {
expect(Filer.Buffer.alloc).to.be.a('function');
});
it('should support .isBuffer()', function() {
const buf = Buffer.alloc(0);
expect(Buffer.isBuffer(buf)).to.be.true;
});
describe('Deprecation checks - constructor vs. class method init', function() {
it('should allow new Buffer(array)', function() {
const arr = [1, 2, 3];
const buf1 = new Buffer(arr);
const buf2 = new Buffer.from(arr);
expect(buf1).to.deep.equal(buf2);
});
it('should allow new Buffer(ArrayBuffer)', function() {
const arrayBuffer = (new Uint8Array([1, 2, 3])).buffer;
const buf1 = new Buffer(arrayBuffer);
const buf2 = Buffer.from(arrayBuffer);
expect(buf1).to.deep.equal(buf2);
});
it('should allow new Buffer(ArrayBuffer)', function() {
const buffer = new Buffer.from([1, 2, 3]);
const buf1 = new Buffer(buffer);
const buf2 = Buffer.from(buffer);
expect(buf1).to.deep.equal(buf2);
});
it('should allow new Buffer(string)', function() {
const s = 'Hello World';
const buf1 = new Buffer(s);
const buf2 = Buffer.from(s);
expect(buf1).to.deep.equal(buf2);
});
});
});

View File

@ -1,17 +1,16 @@
'use strict';
const Filer = require('../../src');
const util = require('../lib/test-utils.js');
const expect = require('chai').expect;
var Filer = require('../..');
var util = require('../lib/test-utils.js');
var expect = require('chai').expect;
describe('Filer.FileSystem', function() {
describe("Filer.FileSystem", function() {
beforeEach(util.setup);
afterEach(util.cleanup);
it('should properly mount new or existing filesystem', function(done) {
const provider = util.provider().provider;
var provider = util.provider().provider;
// 1) Should be able to open a new filesystem, and get empty root
const fs1 = new Filer.FileSystem({provider: provider}, function() {
var fs1 = new Filer.FileSystem({provider: provider}, function() {
fs1.readdir('/', function(err, entries) {
expect(err).not.to.exist;
expect(entries).to.be.an('array');
@ -21,7 +20,7 @@ describe('Filer.FileSystem', function() {
if(err) throw err;
// 2) Should be able to open an existing filesystem
const fs2 = new Filer.FileSystem({provider: provider}, function() {
var fs2 = new Filer.FileSystem({provider: provider}, function() {
fs2.readdir('/', function(err, entries) {
expect(err).not.to.exist;
expect(entries).to.be.an('array');
@ -30,7 +29,7 @@ describe('Filer.FileSystem', function() {
// 3) FORMAT flag should wipe an existing filesystem
const fs3 = new Filer.FileSystem({provider: provider, flags: ['FORMAT']}, function() {
var fs3 = new Filer.FileSystem({provider: provider, flags: ['FORMAT']}, function() {
fs3.readdir('/', function(err, entries) {
expect(err).not.to.exist;
expect(entries).to.be.an('array');

View File

@ -1,84 +1,16 @@
'use strict';
const Filer = require('../../src');
const util = require('../lib/test-utils');
const expect = require('chai').expect;
var Filer = require('../..');
var expect = require('chai').expect;
describe('Filer', function() {
it('is defined', function() {
describe("Filer", function() {
it("is defined", function() {
expect(typeof Filer).not.to.equal(undefined);
});
it('has FileSystem constructor', function() {
it("has FileSystem constructor", function() {
expect(typeof Filer.FileSystem).to.equal('function');
});
it('has Buffer constructor', function() {
expect(typeof Filer.Buffer).to.equal('function');
});
it('has Path and path objects', function() {
expect(typeof Filer.Path).to.equal('object');
expect(typeof Filer.path).to.equal('object');
expect(Filer.Path).to.equal(Filer.path);
});
it('has Errors object', function() {
expect(typeof Filer.Errors).to.equal('object');
});
it('has an fs object that returns a Filer.FileSystem', function() {
// Depends on IndexedDB being available, since we can't
// configure our own test provider. Shim for coverage.
util.shimIndexedDB(function() {
expect(typeof Filer.fs).to.equal('object');
const fs1 = Filer.fs;
const fs2 = Filer.fs;
expect(fs1).to.be.an.instanceof(Filer.FileSystem);
expect(fs2).to.be.an.instanceof(Filer.FileSystem);
expect(fs1).to.equal(fs2);
});
});
it('has Shell constructor', function() {
it("has Shell constructor", function() {
expect(typeof Filer.Shell).to.equal('function');
});
it('must honor the \'FORMAT\' flag', function(done) {
const name = 'local-test';
// Because we need to use a bunch of Filer filesystems
// in this test, we can't use the usual test infrastructure
// to create/manage the fs instance. Pick the best one
// based on the testing environment (browser vs. node)
const providers = Filer.FileSystem.providers;
let Provider;
if(providers.IndexedDB.isSupported()) {
Provider = providers.IndexedDB;
} else {
Provider = providers.Memory;
}
let fs = new Filer.FileSystem({name, provider: new Provider(name)});
let fs2 = new Filer.FileSystem({name, provider: new Provider(name)});
fs.mkdir('/test', function(err){
if(err) throw err;
fs2.readdir('/', function(err, list) {
if(err) throw err;
expect(list).to.exist;
expect(list).to.have.length(1);
fs2 = new Filer.FileSystem({name, provider: new Provider(name), flags:['FORMAT']});
fs2.readdir('/', function(err, list2) {
expect(err).to.not.exist;
expect(list2).to.exist;
expect(list2).to.have.length(0);
done();
});
});
});
});
});

View File

@ -1,151 +0,0 @@
'use strict';
const util = require('../lib/test-utils.js');
const expect = require('chai').expect;
describe('fs.access', function () {
beforeEach(util.setup);
afterEach(util.cleanup);
it('should expose access mode flags on fs and fs.constants', function() {
const fs = util.fs();
// F_OK
expect(fs.F_OK).to.equal(0);
expect(fs.constants.F_OK).to.equal(0);
// R_OK
expect(fs.R_OK).to.equal(4);
expect(fs.constants.R_OK).to.equal(4);
// W_OK
expect(fs.W_OK).to.equal(2);
expect(fs.constants.W_OK).to.equal(2);
// X_OK
expect(fs.X_OK).to.equal(1);
expect(fs.constants.X_OK).to.equal(1);
});
it('should be a function', function () {
const fs = util.fs();
expect(typeof fs.access).to.equal('function');
});
it('should return an error if file does not exist', function (done) {
const fs = util.fs();
fs.access('/tmp', fs.constants.F_OK, function (error) {
expect(error).to.exist;
expect(error.code).to.equal('ENOENT');
done();
});
});
it('should return no error if file does exist and mode = F_OK', function (done) {
const fs = util.fs();
const contents = 'This is a file.';
fs.writeFile('/myfile', contents, function (error) {
if (error) throw error;
fs.access('/myfile', fs.constants.F_OK, function (error) {
expect(error).not.to.exist;
done();
});
});
});
it('should return no error if file does exist and mode = R_OK', function (done) {
const fs = util.fs();
const contents = 'This is a file.';
fs.writeFile('/myfile', contents, function (error) {
if (error) throw error;
fs.access('/myfile', fs.constants.R_OK, function (error) {
expect(error).not.to.exist;
done();
});
});
});
it('should return no error if file does exist and mode = W_OK', function (done) {
const fs = util.fs();
const contents = 'This is a file.';
fs.writeFile('/myfile', contents, function (error) {
if (error) throw error;
fs.access('/myfile', fs.constants.W_OK, function (error) {
expect(error).not.to.exist;
done();
});
});
});
it('should return an error if file is not executable and mode = X_OK', function (done) {
const fs = util.fs();
const contents = 'This is a file.';
fs.writeFile('/myfile', contents, function (error) {
if (error) throw error;
fs.chmod('/myfile', '644', function(error){
if (error) throw error;
fs.access('/myfile', fs.constants.X_OK, function (error) {
expect(error).to.exist;
expect(error.code).to.equal('EACCES');
done();
});
});
});
});
it('should return no error if file does exist and mode = X_OK', function (done) {
const fs = util.fs();
const contents = 'This is a file.';
fs.writeFile('/myfile', contents, function (error) {
if (error) throw error;
fs.chmod('/myfile', 0o777, function(error) {
if (error) throw error;
fs.access('/myfile', fs.constants.X_OK, function (error) {
expect(error).not.to.exist;
done();
});
});
});
});
it('should return no error if file does exist and no mode is passed', function (done) {
const fs = util.fs();
const contents = 'This is a file.';
fs.writeFile('/myfile', contents, function (error) {
if (error) throw error;
fs.access('/myfile', function (error) {
expect(error).not.to.exist;
done();
});
});
});
it('should return no error if file does exist and mode = R_OK | W_OK', function (done) {
const fs = util.fs();
const contents = 'This is a file.';
fs.writeFile('/myfile', contents, function (error) {
if (error) throw error;
fs.access('/myfile', fs.constants.R_OK | fs.constants.W_OK, function (error) {
expect(error).not.to.exist;
done();
});
});
});
});

View File

@ -1,15 +1,12 @@
'use strict';
const util = require('../lib/test-utils.js');
const expect = require('chai').expect;
var Filer = require('../..');
var util = require('../lib/test-utils.js');
var expect = require('chai').expect;
describe('fs.appendFile', function() {
const contents = 'This is a file.';
beforeEach(function(done) {
util.setup(function() {
const fs = util.fs();
fs.writeFile('/myfile', contents, function(error) {
var fs = util.fs();
fs.writeFile('/myfile', "This is a file.", { encoding: 'utf8' }, function(error) {
if(error) throw error;
done();
});
@ -18,13 +15,14 @@ describe('fs.appendFile', function() {
afterEach(util.cleanup);
it('should be a function', function() {
const fs = util.fs();
var fs = util.fs();
expect(fs.appendFile).to.be.a('function');
});
it('should append a utf8 file without specifying utf8 in appendFile', function(done) {
const fs = util.fs();
const more = ' Appended.';
var fs = util.fs();
var contents = "This is a file.";
var more = " Appended.";
fs.appendFile('/myfile', more, function(error) {
if(error) throw error;
@ -38,8 +36,9 @@ describe('fs.appendFile', function() {
});
it('should append a utf8 file with "utf8" option to appendFile', function(done) {
const fs = util.fs();
const more = ' Appended.';
var fs = util.fs();
var contents = "This is a file.";
var more = " Appended.";
fs.appendFile('/myfile', more, 'utf8', function(error) {
if(error) throw error;
@ -53,8 +52,9 @@ describe('fs.appendFile', function() {
});
it('should append a utf8 file with {encoding: "utf8"} option to appendFile', function(done) {
const fs = util.fs();
const more = ' Appended.';
var fs = util.fs();
var contents = "This is a file.";
var more = " Appended.";
fs.appendFile('/myfile', more, { encoding: 'utf8' }, function(error) {
if(error) throw error;
@ -68,12 +68,14 @@ describe('fs.appendFile', function() {
});
it('should append a binary file', function(done) {
const fs = util.fs();
var fs = util.fs();
// String and utf8 binary encoded versions of the same thing: 'This is a file.'
const binary = Buffer.from([84, 104, 105, 115, 32, 105, 115, 32, 97, 32, 102, 105, 108, 101, 46]);
const binary2 = Buffer.from([32, 65, 112, 112, 101, 110, 100, 101, 100, 46]);
const binary3 = Buffer.from([84, 104, 105, 115, 32, 105, 115, 32, 97, 32, 102, 105, 108, 101, 46,
// String and utf8 binary encoded versions of the same thing:
var contents = "This is a file.";
var binary = new Buffer([84, 104, 105, 115, 32, 105, 115, 32, 97, 32, 102, 105, 108, 101, 46]);
var more = " Appended.";
var binary2 = new Buffer([32, 65, 112, 112, 101, 110, 100, 101, 100, 46]);
var binary3 = new Buffer([84, 104, 105, 115, 32, 105, 115, 32, 97, 32, 102, 105, 108, 101, 46,
32, 65, 112, 112, 101, 110, 100, 101, 100, 46]);
fs.writeFile('/mybinaryfile', binary, function(error) {
@ -92,9 +94,9 @@ describe('fs.appendFile', function() {
});
it('should follow symbolic links', function(done) {
const fs = util.fs();
const contents = 'This is a file.';
const more = ' Appended.';
var fs = util.fs();
var contents = "This is a file.";
var more = " Appended.";
fs.symlink('/myfile', '/myFileLink', function (error) {
if (error) throw error;
@ -112,7 +114,8 @@ describe('fs.appendFile', function() {
});
it('should work when file does not exist, and create the file', function(done) {
const fs = util.fs();
var fs = util.fs();
var contents = "This is a file.";
fs.appendFile('/newfile', contents, { encoding: 'utf8' }, function(error) {
expect(error).not.to.exist;
@ -124,101 +127,4 @@ describe('fs.appendFile', function() {
});
});
});
it('should accept numbers and append them to the file', function(done) {
const fs = util.fs();
const more = 10000;
fs.appendFile('/myfile', more, 'utf8', function(error) {
if(error) throw error;
fs.readFile('/myfile', 'utf8', function(error, data) {
expect(error).not.to.exist;
expect(data).to.equal(contents + more);
done();
});
});
});
});
describe('fs.promises.appendFile', function() {
beforeEach(function(done) {
util.setup(function() {
const fs = util.fs();
return fs.promises.writeFile('/myfile', 'This is a file.', { encoding: 'utf8' })
.then(done)
.catch(done);
});
});
afterEach(util.cleanup);
it('should be a function', function() {
const fs = util.fs();
expect(fs.promises.appendFile).to.be.a('function');
});
it('should append a utf8 file without specifying utf8 in appendFile', function() {
const fs = util.fs();
const contents = 'This is a file.';
const more = ' Appended.';
return fs.promises.appendFile('/myfile', more)
.then(() => fs.promises.readFile('/myfile', 'utf8'))
.then(data => expect(data).to.equal(contents + more));
});
it('should append a utf8 file with "utf8" option to appendFile', function() {
const fs = util.fs();
const contents = 'This is a file.';
const more = ' Appended.';
return fs.promises.appendFile('/myfile', more, 'utf8')
.then(() => fs.promises.readFile('/myfile', 'utf8'))
.then(data => expect(data).to.equal(contents + more));
});
it('should append a utf8 file with {encoding: "utf8"} option to appendFile', function() {
const fs = util.fs();
const contents = 'This is a file.';
const more = ' Appended.';
return fs.promises.appendFile('/myfile', more, { encoding: 'utf8' })
.then(() => fs.promises.readFile('/myfile', { encoding: 'utf8' }))
.then(data => expect(data).to.equal(contents + more));
});
it('should append a binary file', function() {
const fs = util.fs();
// String and utf8 binary encoded versions of the same thing: 'This is a file.'
const binary = Buffer.from([84, 104, 105, 115, 32, 105, 115, 32, 97, 32, 102, 105, 108, 101, 46]);
const binary2 = Buffer.from([32, 65, 112, 112, 101, 110, 100, 101, 100, 46]);
const binary3 = Buffer.from([84, 104, 105, 115, 32, 105, 115, 32, 97, 32, 102, 105, 108, 101, 46,
return fs.promises.writeFile('/mybinaryfile', binary)
.then(() => fs.promises.appendFile('/mybinaryfile', binary2))
.then(() => fs.promises.readFile('/mybinaryfile', 'ascii'))
.then(data => expect(data).to.deep.equal(binary3));
});
it('should follow symbolic links', function() {
const fs = util.fs();
const contents = 'This is a file.';
const more = ' Appended.';
return fs.promises.symlink('/myfile', '/myFileLink')
.then(() => fs.promises.appendFile('/myFileLink', more, 'utf8'))
.then(() => fs.promises.readFile('/myFileLink', 'utf8'))
.then(data => expect(data).to.equal(contents + more));
});
it('should work when file does not exist, and create the file', function() {
const fs = util.fs();
const contents = 'This is a file.';
return fs.promises.appendFile('/newfile', contents, { encoding: 'utf8' })
.then(() => fs.promises.readFile('/newfile', 'utf8'))
.then(data => expect(data).to.equal(contents));
});
});
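Outside the spec harness, the Promise-based appendFile flow exercised above looks roughly like the sketch below. The require('filer') import and the Filer.FileSystem constructor are assumed here for illustration; the fs.promises.appendFile and fs.promises.readFile calls match the API used by the tests.
const Filer = require('filer');
// Create a FileSystem instance with the default options and provider.
const fs = new Filer.FileSystem();
async function appendAndRead() {
  // appendFile creates '/log.txt' if it does not already exist.
  await fs.promises.appendFile('/log.txt', 'first line\n', 'utf8');
  await fs.promises.appendFile('/log.txt', 'second line\n', 'utf8');
  return fs.promises.readFile('/log.txt', 'utf8');
}
appendAndRead()
  .then(contents => console.log(contents))
  .catch(err => console.error(err));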

View File

@@ -1,178 +0,0 @@
'use strict';
const util = require('../lib/test-utils.js');
const expect = require('chai').expect;
describe('fs.chmod, fs.fchmod', function() {
beforeEach(util.setup);
afterEach(util.cleanup);
it('should be functions', function() {
const fs = util.fs();
expect(typeof fs.chmod).to.equal('function');
expect(typeof fs.fchmod).to.equal('function');
});
it('should automatically set mode=755 for a directory', function(done) {
const fs = util.fs();
fs.mkdir('/dir', function(err) {
if(err) throw err;
fs.stat('/dir', function(err, stats) {
if(err) throw err;
expect(stats.mode & 0o755).to.equal(0o755);
done();
});
});
});
it('should automatically set mode=644 for a file', function(done) {
const fs = util.fs();
fs.open('/file', 'w', function(err, fd) {
if(err) throw err;
fs.fstat(fd, function(err, stats) {
if(err) throw err;
expect(stats.mode & 0o644).to.equal(0o644);
fs.close(fd, done);
});
});
});
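// The mode assertions in this spec mask stats.mode with the expected
// permission bits (e.g. stats.mode & 0o755) so that any additional bits
// carried in the mode value do not affect the comparison.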
it('should be an error when the path is invalid', function(done){
const fs = util.fs();
fs.chmod('/invalid_path', 0o444, function(err){
expect(err).to.exist;
expect(err.code).to.equal('ENOENT');
done();
});
});
it('should error if mode value is a non-numeric string', function(done) {
const fs = util.fs();
fs.mkdir('/dir', function(err) {
if(err) throw err;
fs.chmod('/dir', 'mode', function(err) {
expect(err).to.exist;
expect(err.code).to.equal('EINVAL');
done();
});
});
});
it('should error if mode value is null', function(done) {
const fs = util.fs();
fs.mkdir('/dir', function(err) {
if(err) throw err;
fs.chmod('/dir', null, function(err) {
expect(err).to.exist;
expect(err.code).to.equal('EINVAL');
done();
});
});
});
it('should error if mode value is non-integer number', function(done) {
const fs = util.fs();
fs.mkdir('/dir', function(err) {
if(err) throw err;
fs.chmod('/dir', 3.14, function(err) {
expect(err).to.exist;
expect(err.code).to.equal('EINVAL');
done();
});
});
});
it('should allow octal strings for mode value', function(done) {
const fs = util.fs();
fs.mkdir('/dir', function(err) {
if(err) throw err;
fs.chmod('/dir', '777', function(err) {
if(err) throw err;
fs.stat('/dir/', function(err, stats) {
if(err) throw err;
expect(stats.mode & 0o777).to.equal(0o777);
done();
});
});
});
});
it('should allow for updating mode of a given file', function(done) {
const fs = util.fs();
fs.open('/file', 'w', function(err, fd) {
if(err) throw err;
fs.fchmod(fd, 0o777, function(err) {
if(err) throw err;
fs.fstat(fd, function(err, stats) {
if(err) throw err;
expect(stats.mode & 0o777).to.equal(0o777);
fs.close(fd, function(err) {
if(err) throw err;
fs.chmod('/file', 0o444, function(err) {
if(err) throw err;
fs.stat('/file', function(err, stats) {
if(err) throw err;
expect(stats.mode & 0o444).to.equal(0o444);
done();
});
});
});
});
});
});
});
});
describe('fsPromise.chmod', function() {
beforeEach(util.setup);
afterEach(util.cleanup);
it('should be a function', function() {
const fsPromise = util.fs().promises;
expect(typeof fsPromise.chmod).to.equal('function');
});
it('should allow for updating mode of a given file', function() {
const fsPromise = util.fs().promises;
return fsPromise.open('/file', 'w')
.then(() => fsPromise.chmod('/file', 0o444))
.then(() => fsPromise.stat('/file'))
.then(stats => expect(stats.mode & 0o444).to.equal(0o444));
});
});
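As a rough usage sketch of the permission APIs exercised above: the require('filer') import and the Filer.FileSystem constructor are assumed for illustration, while the fs.promises.writeFile, chmod, and stat calls match the tests in this file.
const Filer = require('filer');
const fs = new Filer.FileSystem();
async function makeReadOnly(path) {
  // Create the file, then drop it to read-only, as in the spec above.
  await fs.promises.writeFile(path, '');
  await fs.promises.chmod(path, 0o444);
  const stats = await fs.promises.stat(path);
  // Mask to the permission bits and format as octal, e.g. '444'.
  return (stats.mode & 0o777).toString(8);
}
makeReadOnly('/readonly.txt')
  .then(mode => console.log(mode))
  .catch(err => console.error(err));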

Some files were not shown because too many files have changed in this diff.