Merge branch 'beta'
# Conflicts:
#	.github/workflows/node.js.yml
Bob620 committed Oct 31, 2020
2 parents 22e5ec7 + d7f527a commit 8c4d3c9
Showing 232 changed files with 116,664 additions and 514 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/node.js.yml
@@ -1,7 +1,7 @@
# This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions

name: Node.js CI
name: All Mocha Tests

on:
push:
@@ -11,13 +11,13 @@ on:

jobs:
build:

runs-on: [ ubuntu-latest, windows-latest, macos-latest, ubuntu-20.04, ubuntu-18.04 ]

strategy:
matrix:
os: [ ubuntu-latest, windows-latest, macos-latest, ubuntu-18.04, ubuntu-20.04, windows-2019, macos-10.15 ]
node-version: [ 14.x ]

runs-on: ${{ matrix.os }}

steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
2 changes: 1 addition & 1 deletion .gitignore
@@ -1,4 +1,4 @@
tests/data/*/*.tif
test/data/*/*.tif
.idea

# Logs
64 changes: 64 additions & 0 deletions .npmignore
@@ -0,0 +1,64 @@
test/
.idea

# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage

# nyc test coverage
.nyc_output

# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# TypeScript v1 declaration files
typings/

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variables file
.env

# next.js build output
.next
2 changes: 1 addition & 1 deletion calculations.js
@@ -199,7 +199,7 @@ async function calculateConstants(meta, scratchCtx, font) {

const metaConstants = {
width: meta.width,
height: meta.cutoffHeight ? meta.cutoffHeight : meta.height,
height: meta.cutoffHeight ? (meta.cutoffHeight < meta.height ? meta.cutoffHeight : meta.height * 0.9375) : meta.height,
maxWidth: meta.width,
maxHeight: meta.height,
fullHeight: meta.height,
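The calculations.js change above clamps the cutoff height: when `meta.cutoffHeight` is set but not smaller than the full image height, the height falls back to 15/16 (0.9375) of `meta.height`. A minimal standalone sketch of that ternary (the helper name and sample values are illustrative, not part of the commit):

```js
// Height selection as introduced in calculateConstants() above.
function resolveHeight(meta) {
	if (!meta.cutoffHeight)
		return meta.height;

	return meta.cutoffHeight < meta.height
		? meta.cutoffHeight
		: meta.height * 0.9375;
}

// resolveHeight({height: 1024, cutoffHeight: 960})  -> 960
// resolveHeight({height: 1024, cutoffHeight: 2048}) -> 960 (1024 * 0.9375)
// resolveHeight({height: 1024})                     -> 1024
```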
6 changes: 3 additions & 3 deletions constants.json
@@ -175,14 +175,14 @@
},
"pfe": {
"fileFormats": {
"ENTRY": ".mdb",
"IMAGE": ".bim"
"ENTRY": ".MDB",
"IMAGE": ".BIM"
}
},
"jeol": {
"fileFormats": {
"ENTRY": ".txt",
"IMAGE": ".tif",
"IMAGES": [".tif", ".jpg", ".bmp"],
"OUTPUTIMAGE": ".png"
}
}
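The constants.json change makes the PFE extensions upper-case (`.MDB`/`.BIM`) and adds an `IMAGES` list of candidate JEOL extensions. A short sketch of how the PFE pair is consumed, assuming a hypothetical entry path and mirroring the `slice` in `readBIM()` in the io.js hunk below:

```js
// Swap the PFE entry extension for the image extension (example path only).
const constants = require('./constants.json');

const entryUri = 'C:/data/project.MDB';            // hypothetical
const {ENTRY, IMAGE} = constants.pfe.fileFormats;  // '.MDB', '.BIM'

const imageUri = entryUri.slice(0, entryUri.length - ENTRY.length) + IMAGE;
// -> 'C:/data/project.BIM'
```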
88 changes: 73 additions & 15 deletions io.js
@@ -1,4 +1,6 @@
const fs = require('fs');

const iconv = require('iconv-lite');
let Adodb;

try {
@@ -47,7 +49,7 @@ function readNSSEntry(uri) {

for (const line of rawData.shift())
if (line.length > 1 && line.includes('.'))
switch (line.split('.')[1].toLowerCase()) {
switch(line.split('.')[1].toLowerCase()) {
case 'psref':
case 'siref':
output.data.base = line;
Expand Down Expand Up @@ -93,13 +95,13 @@ async function getPFEExpectedImages(databaseUri) {

return images.length;

} catch(err) {
} catch (err) {
throw 'Unable to open and read PFE mdb file';
}
}

async function readPFEEntry(databaseUri) {
const [uri, imageNum='1'] = databaseUri.split('?');
const [uri, imageNum = '1'] = databaseUri.split('?');

let connection;

@@ -123,25 +125,25 @@ async function readPFEEntry(databaseUri) {
const yDiff = Math.abs(yLarge - ySmall);

const points = initialPoints
.map(({Number, StageX, StageY, LineToRow}) => {
return {
name: Number,
analysis: LineToRow,
type: 'spot',
values: [Math.abs(xLarge - StageX), Math.abs(ySmall - StageY), xDiff, yDiff]
}
});
.map(({Number, StageX, StageY, LineToRow}) => {
return {
name: Number,
analysis: LineToRow,
type: 'spot',
values: [Math.abs(xLarge - StageX), Math.abs(ySmall - StageY), xDiff, yDiff]
};
});

return {image, points}
} catch(err) {
return {image, points};
} catch (err) {
if (connection)
await connection.close();
throw 'Unable to open and read PFE mdb file';
}
}

async function readBIM(bimUri) {
let [uri, index='1'] = bimUri.split('?');
let [uri, index = '1'] = bimUri.split('?');
index = parseInt(index);

const data = Buffer.from(await fs.promises.readFile(uri.slice(0, uri.length - constants.pfe.fileFormats.ENTRY.length) + constants.pfe.fileFormats.IMAGE, {encoding: null}));
@@ -178,7 +180,7 @@ async function readBIM(bimUri) {
}

function readJeolEntry(uri) {
let rawData = fs.readFileSync(uri, {encoding: 'utf8'}).split('$');
let rawData = iconv.decode(fs.readFileSync(uri), 'win1251').split('$');

return rawData.filter(data => data).reduce((output, line) => {
const [prop, ...data] = line.trim().toLowerCase().split(' ');
@@ -196,7 +198,63 @@ function checkBIMExists(uri) {
return fs.accessSync(uri, fs.constants.R_OK);
}

// nothing exists to do this apparently
function readBmp(uri) {
const raw = fs.readFileSync(uri);

if (raw[0] !== 66 || raw[1] !== 77)
throw new Error('Unsupported bmp format');

const data = new DataView(raw.buffer);

// const fileSize = data.getInt32(2, true);
const offset = data.getInt16(10, true);
const format = data.getInt32(14, true);

if (format !== 40)
throw new Error('Unsupported bmp format');

const width = data.getInt32(18, true);
const height = data.getInt32(22, true);
const planes = data.getInt16(26, true);
const colorDepth = data.getInt16(28, true);
const compressionMethod = data.getInt32(30, true);
// const imageSize = data.getInt32(34, true);
// const horizontalRes = data.getInt32(38, true);
// const verticalRes = data.getInt32(42, true);
// const colorPallet = data.getInt32(46, true);
// const importantColors = data.getInt32(50, true);

if (planes !== 1 || colorDepth !== 8)
throw new Error('Unsupported bmp format');

if (compressionMethod !== 0)
throw new Error('Unsupported compression method');

// const mask = data.buffer.slice(54, offset);

let pixels = [];
let w = 0;
let h = height;
for (let i = 0; i < width * height; i++, w++) {
if (i % width - 1 === 0) {
h--;
w = 0;
}

const pixelOffset = w + (h * width - 1);
pixels[pixelOffset] = data.getInt8(offset + i);
}

return {
pixels: Buffer.from(pixels),
height,
width
};
}

module.exports = {
readBmp,
readMASFile,
readNSSEntry,
getPFEExpectedImages,
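The new `readBmp()` exported above parses uncompressed 8-bit BMPs with a 40-byte BITMAPINFOHEADER and returns the raw pixel buffer plus dimensions. A hypothetical usage sketch (the file path is an example, not from the repository):

```js
const {readBmp} = require('./io.js');

// Throws for anything other than an uncompressed, single-plane,
// 8-bit-per-pixel BMP.
const {pixels, width, height} = readBmp('./test/data/example/image.bmp');

console.log(`${width}x${height}, ${pixels.length} pixel bytes`);
```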
46 changes: 27 additions & 19 deletions jeolimage.js
@@ -16,26 +16,34 @@ module.exports = class extends Thermo {

staticInit() {
const entryData = io.readJeolEntry(this.data.files.entry);
try {
this.data.files.base = this.data.uri + this.data.name + constants.jeol.fileFormats.IMAGE;

io.checkJeolExists(this.data.files.base);

this.data.files.layers = [{
element: 'base',
file: this.data.files.base,
cutoffHeight: parseInt(entryData['cm_full_size'][1])
}, {
element: 'solid',
file: ''
}];

this.data.points = {};
this.data.files.points = [];
this.data.data.map = {};
this.data.magnification = parseInt(entryData['cm_mag']);
} catch (err) {
for (let IMAGE of constants.jeol.fileFormats.IMAGES) {
let testBase;
try {
testBase = this.data.uri + this.data.name + IMAGE;
io.checkJeolExists(testBase);

this.data.files.base = testBase;
break;
} catch (err) {
}
}

if (this.data.files.base === '') {
throw 'Not an operable JEOL file';
}

this.data.files.layers = [{
element: 'base',
file: this.data.files.base,
cutoffHeight: parseInt(entryData['cm_full_size'][1])
}, {
element: 'solid',
file: ''
}];

this.data.points = {};
this.data.files.points = [];
this.data.data.map = {};
this.data.magnification = parseInt(entryData['cm_mag']);
}
};
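The jeolimage.js change replaces the single `IMAGE` (`.tif`) base-image lookup with a probe over `constants.jeol.fileFormats.IMAGES`, taking the first extension that `io.checkJeolExists()` accepts. A condensed sketch of that loop (the helper name is illustrative, not part of the commit):

```js
const io = require('./io.js');
const constants = require('./constants.json');

function findJeolBase(uri, name) {
	for (const ext of constants.jeol.fileFormats.IMAGES) { // ['.tif', '.jpg', '.bmp']
		try {
			const candidate = uri + name + ext;
			io.checkJeolExists(candidate); // throws when the file is unreadable
			return candidate;
		} catch (err) {
			// try the next extension
		}
	}

	throw 'Not an operable JEOL file';
}
```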