-
Notifications
You must be signed in to change notification settings - Fork 4
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Create new types of collections #25
Closed
+686
−2
Closed
Changes from all commits
Commits
Show all changes
17 commits
Select commit
Hold shift + click to select a range
3cfbb4c
create initial types of collections defined in gsoc
brauliorivas 186024e
loading function to check if types are correctly working
brauliorivas fbfb0ea
test suite for loadParticles and buildLoader
brauliorivas c7b270c
change example data for RecoParticle test
brauliorivas 170de18
Merge branch 'main' into new-types
brauliorivas 27d03ec
check if previews work
brauliorivas 0a52737
small change
brauliorivas 02392eb
another small change
brauliorivas 13978a4
remove unnecessary exclamation mark
brauliorivas 5f48b1d
eliminate try catch
brauliorivas 00bb203
add version check + dynamic load of object
brauliorivas 83a9bbd
update tests
brauliorivas 3e64c41
types for RecoParticle relations including 1:1 and 1:many
brauliorivas f7a14c5
add links for types
brauliorivas 904cd51
test for loading functions
brauliorivas ba9630c
test reconstruction
brauliorivas 69f3fd6
remove a few things
brauliorivas File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,15 @@ | ||
/**
 * Copies every enumerable property of `data` onto `object`, optionally
 * skipping the keys contained in `ignore`.
 *
 * @param {Object} object - Target instance that receives the properties (mutated).
 * @param {Object} data - Source of key/value pairs to copy.
 * @param {?Set<string>} ignore - Keys to leave untouched, or null for none.
 */
export function dynamicLoad(object, data, ignore = null) {
  let source;

  if (ignore === null) {
    source = data;
  } else {
    // Build a shallow copy of data without the ignored keys.
    source = {};
    for (const key in data) {
      if (!ignore.has(key)) source[key] = data[key];
    }
  }

  Object.entries(source).forEach(([key, value]) => {
    object[key] = value;
  });
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,46 @@ | ||
import { types } from "./reconstruction.js"; | ||
import { compatible } from "./version.js"; | ||
|
||
/**
 * Builds a map from particle-type name to its `load` function, based on the
 * requested configuration and the file's edm4hep version.
 *
 * @param {string|string[]} config - One type name or a list of type names.
 * @param {string} version - edm4hep version string ("major.minor.patch").
 * @returns {Object<string, Function>} Loader map; types whose version range
 *   does not cover `version` get a stub loader that returns an empty array.
 */
export function buildLoader(config, version) {
  const newLoader = {};

  // Accept a single type name as a convenience.
  if (typeof config === "string") config = [config];

  for (const particle of config) {
    // Unknown type names are silently skipped (unchanged behavior).
    // Object.hasOwn instead of types.hasOwnProperty: safe even if `types`
    // ever shadows hasOwnProperty, and the modern idiom.
    if (!Object.hasOwn(types, particle)) continue;

    if (compatible(types[particle], version)) {
      newLoader[particle] = types[particle].load;
    } else {
      // Version mismatch: loader that produces no particles.
      newLoader[particle] = () => [];
    }
  }

  return newLoader;
}
|
||
/**
 * Loads all requested particle collections from one event of the input JSON.
 *
 * @param {Object} jsonData - Parsed JSON file ("Event N" -> event data).
 * @param {number|string} event - Event number to read.
 * @param {string|string[]} loadersConfig - Type name(s) to load.
 * @returns {Object<string, Array>} Map from type name to loaded particles.
 */
export function loadParticles(jsonData, event, loadersConfig) {
  const eventData = jsonData["Event " + event];
  const version = eventData.edm4hepVersion;

  const loader = buildLoader(loadersConfig, version);

  const particles = {};
  Object.keys(loader).forEach((key) => (particles[key] = []));

  for (const [type, loadFunction] of Object.entries(loader)) {
    const particlesType = Object.values(eventData).filter(
      (element) => element.collType === `edm4hep::${type}Collection`
    );

    particlesType.forEach(({ collection }) => {
      // BUG FIX: the destructured result used to shadow the outer
      // `particles` object and was discarded, so this function always
      // returned empty arrays. Accumulate the loaded particles instead.
      const [loadedParticles, links] = loadFunction(collection);
      particles[type].push(...(loadedParticles ?? []));
      // TODO(review): `links` are currently discarded; decide where to
      // surface them without changing this function's return shape.
    });
  }

  return particles;
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,268 @@ | ||
import { dynamicLoad } from "./dynamic.js"; | ||
|
||
export class Cluster {
  // Supported edm4hep version range for this type (may vary per type).
  static MIN_VERSION = "0.7.0";
  static MAX_VERSION = "1.0.0";

  constructor() {
    // Physics properties
    this.type = 0;
    this.energy = 0; // GeV
    this.energyError = 0; // GeV
    this.position = {}; // mm
    this.positionError = {};
    this.iTheta = 0;
    this.phi = 0;
    this.directionError = {}; // mm^2
    this.shapeParameters = [];
    this.subdetectorEnergies = [];
    // One-to-many relations (hold link ids after load)
    this.clusters = [];
    this.hits = [];
  }

  /**
   * Builds Cluster instances from a raw collection.
   *
   * @param {Array<Object>} collection - Raw particle records.
   * @returns {[Cluster[], Object]} Loaded particles and their links manager.
   */
  static load(collection) {
    const particles = [];
    const links = createLinksManager(["clusters", "hits"]);

    for (const [index, particle] of collection.entries()) {
      const cluster = new Cluster();
      cluster.index = index;

      extractOneToManyLinks(links, ["clusters", "hits"], cluster, particle);

      // BUG FIX: exclude the relation keys so dynamicLoad does not clobber
      // the link ids just stored by extractOneToManyLinks (same pattern
      // already used in ReconstructedParticle.load and Vertex.load).
      dynamicLoad(cluster, particle, new Set(["clusters", "hits"]));

      particles.push(cluster);
    }

    return [particles, links];
  }
}
|
||
export class ParticleID {
  // Supported edm4hep version range for this type.
  static MIN_VERSION = "0.7.0";
  static MAX_VERSION = "1.0.0";

  constructor() {
    // Physics properties
    this.type = 0;
    this.PDG = 0;
    this.algorithmType = 0;
    this.likelihood = 0;
    this.parameters = [];
    // One-to-one relation (holds a link id after load)
    this.particle = null;
  }

  /**
   * Builds ParticleID instances from a raw collection.
   *
   * @param {Array<Object>} collection - Raw particle records.
   * @returns {[ParticleID[], Object]} Loaded particles and their links manager.
   */
  static load(collection) {
    const particles = [];
    const links = createLinksManager(["particle"]);

    for (const [index, particle] of collection.entries()) {
      const particleID = new ParticleID();
      particleID.index = index;

      extractOneToOneLink(links, "particle", particleID, particle);

      // BUG FIX: ignore "particle" so dynamicLoad does not overwrite the
      // link id just stored by extractOneToOneLink (same pattern already
      // used in Vertex.load with "associatedParticle").
      dynamicLoad(particleID, particle, new Set(["particle"]));

      particles.push(particleID);
    }

    return [particles, links];
  }
}
|
||
export class ReconstructedParticle {
  // Supported edm4hep version range for this type.
  static MIN_VERSION = "0.7.0";
  static MAX_VERSION = "1.0.0";

  constructor() {
    // Physics properties
    this.PDG = 0;
    this.energy = 0; // GeV
    this.momentum = {}; // GeV
    this.referencePoint = {}; // mm
    this.charge = 0;
    this.mass = 0; // GeV
    this.goodnessOfPID = 0;
    this.covMatrix = {};
    this.startVertex = null;
    this.clusters = [];
    this.tracks = [];
    this.particles = [];
  }

  /**
   * Builds ReconstructedParticle instances from a raw collection.
   *
   * @param {Array<Object>} collection - Raw particle records.
   * @returns {[ReconstructedParticle[], Object]} Particles plus links manager.
   */
  static load(collection) {
    // Relation keys handled via the links manager rather than dynamicLoad.
    const oneToManyKeys = ["tracks", "clusters", "particles"];
    const oneToOneKey = "startVertex";

    const particles = [];
    const links = createLinksManager([...oneToManyKeys, oneToOneKey]);

    for (const [index, data] of collection.entries()) {
      const reco = new ReconstructedParticle();
      reco.index = index;

      extractOneToManyLinks(links, oneToManyKeys, reco, data);
      extractOneToOneLink(links, oneToOneKey, reco, data);

      // Relation keys already hold link ids; do not overwrite them.
      dynamicLoad(reco, data, new Set([...oneToManyKeys, oneToOneKey]));

      particles.push(reco);
    }

    return [particles, links];
  }
}
|
||
export class Vertex {
  // Supported edm4hep version range for this type.
  static MIN_VERSION = "0.7.0";
  static MAX_VERSION = "1.0.0";

  constructor() {
    // Physics properties
    this.primary = 0;
    this.chi2 = 0;
    this.probability = 0;
    this.position = {}; // mm
    this.covMatrix = {};
    this.algorithmType = 0;
    this.parameters = 0;
    this.associatedParticle = null;
  }

  /**
   * Builds Vertex instances from a raw collection.
   *
   * @param {Array<Object>} collection - Raw particle records.
   * @returns {[Vertex[], Object]} Loaded particles and their links manager.
   */
  static load(collection) {
    const relationKey = "associatedParticle";
    const particles = [];
    const links = createLinksManager([relationKey]);

    collection.forEach((data, index) => {
      const vertex = new Vertex();
      vertex.index = index;

      extractOneToOneLink(links, relationKey, vertex, data);

      // The relation key already holds a link id; do not overwrite it.
      dynamicLoad(vertex, data, new Set([relationKey]));

      particles.push(vertex);
    });

    return [particles, links];
  }
}
|
||
export class Track {
  // Supported edm4hep version range for this type.
  static MIN_VERSION = "0.7.0";
  static MAX_VERSION = "1.0.0";

  constructor() {
    // Physics properties
    this.type = 0;
    this.chi2 = 0;
    this.ndf = 0;
    this.dEdx = 0;
    this.dEdxError = 0;
    this.radiusOfInnermostHit = 0;
    this.subdetectorHitNumbers = [];
    this.trackStates = [];
    this.dxQuantities = [];
    // One-to-many relations (hold link ids after load)
    this.trackerHits = [];
    this.tracks = [];
  }

  /**
   * Builds Track instances from a raw collection.
   *
   * @param {Array<Object>} collection - Raw particle records.
   * @returns {[Track[], Object]} Loaded particles and their links manager.
   */
  static load(collection) {
    const particles = [];
    const links = createLinksManager(["trackerHits", "tracks"]);

    for (const [index, particle] of collection.entries()) {
      const track = new Track();
      track.index = index;

      extractOneToManyLinks(links, ["trackerHits", "tracks"], track, particle);

      // BUG FIX: exclude the relation keys so dynamicLoad does not clobber
      // the link ids just stored by extractOneToManyLinks (same pattern
      // already used in ReconstructedParticle.load).
      dynamicLoad(track, particle, new Set(["trackerHits", "tracks"]));

      particles.push(track);
    }

    return [particles, links];
  }
}
|
||
/**
 * A single relation between two particles, stored by index.
 * A dedicated class per link type can be introduced later if needed.
 */
export class GenericLink {
  /**
   * @param {number} id - Position of this link inside its links-manager bucket.
   * @param {number} from - Index of the owning particle.
   * @param {number} to - Index of the referenced particle.
   */
  constructor(id, from, to) {
    Object.assign(this, { id, from, to });
  }
}
|
||
/**
 * Creates an empty links store: one array bucket per relation name.
 *
 * @param {string[]} types - Relation names to track.
 * @returns {Object<string, Array>} Map from relation name to empty array.
 */
export function createLinksManager(types) {
  return Object.fromEntries(types.map((type) => [type, []]));
}
|
||
/**
 * Builds a GenericLink from a raw reference ({ collectionID, index }),
 * keeping the source collection id on the link for later resolution.
 *
 * @param {number} id - Link id within its bucket.
 * @param {number} from - Index of the owning particle.
 * @param {{collectionID: *, index: number}} ref - Raw relation target.
 * @returns {GenericLink} The link, annotated with `collectionID`.
 */
export function createGenericLink(id, from, { collectionID, index }) {
  return Object.assign(new GenericLink(id, from, index), { collectionID });
}
|
||
/**
 * Converts raw one-to-many relation entries into GenericLinks.
 * For each key, every raw reference becomes a link stored in
 * `linksManager[key]`, while `newParticle[key]` keeps only the link ids.
 *
 * @param {Object<string, Array>} linksManager - Per-relation link store (mutated).
 * @param {string[]} keys - Relation names to extract.
 * @param {Object} newParticle - Target particle; link ids are pushed onto it.
 * @param {Object} particleData - Raw record holding the relation arrays.
 */
export function extractOneToManyLinks(
  linksManager,
  keys,
  newParticle,
  particleData
) {
  for (const key of keys) {
    // forEach instead of map: the callback runs purely for side effects,
    // and the array map would build was being discarded.
    particleData[key].forEach((target) => {
      const link = createGenericLink(
        linksManager[key].length,
        newParticle.index,
        target
      );
      linksManager[key].push(link);
      newParticle[key].push(link.id);
    });
  }
}
|
||
/**
 * Converts a raw one-to-one relation entry into a GenericLink.
 * The link is stored in `linksManager[key]` and `newParticle[key]` is set
 * to the link id.
 *
 * @param {Object<string, Array>} linksManager - Per-relation link store (mutated).
 * @param {string} key - Relation name to extract.
 * @param {Object} newParticle - Target particle; receives the link id.
 * @param {Object} particleData - Raw record holding the relation.
 */
export function extractOneToOneLink(
  linksManager,
  key,
  newParticle,
  particleData
) {
  const bucket = linksManager[key];
  const link = createGenericLink(
    bucket.length,
    newParticle.index,
    particleData[key]
  );
  bucket.push(link);
  newParticle[key] = link.id;
}
|
||
// Registry of loadable collection types, keyed by edm4hep type name
// (property shorthand: key and class name are identical).
export const types = {
  Cluster,
  ParticleID,
  ReconstructedParticle,
  Vertex,
  Track,
};
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,43 @@ | ||
/**
 * Semantic version ("major.minor.patch") with inclusive ordered comparisons.
 */
export class Version {
  /**
   * @param {string} string - Version string like "1.2.3".
   */
  constructor(string) {
    const [major, minor, patch] = string.split(".");
    // Always pass an explicit radix so parsing never falls into legacy
    // octal/hex interpretation.
    this.major = Number.parseInt(major, 10);
    this.minor = Number.parseInt(minor, 10);
    this.patch = Number.parseInt(patch, 10);
  }

  toString() {
    return `${this.major}.${this.minor}.${this.patch}`;
  }

  /**
   * Comparator-style helper: negative if this < version, zero if equal,
   * positive if this > version. Replaces the duplicated nested boolean
   * logic of the two public comparison methods.
   * @param {Version} version
   * @returns {number}
   */
  #compareTo(version) {
    return (
      this.major - version.major ||
      this.minor - version.minor ||
      this.patch - version.patch
    );
  }

  greaterOrEqualThan(version) {
    return this.#compareTo(version) >= 0;
  }

  lessOrEqualThan(version) {
    return this.#compareTo(version) <= 0;
  }

  /** Inclusive on both ends. */
  isBetween(version1, version2) {
    return this.greaterOrEqualThan(version1) && this.lessOrEqualThan(version2);
  }
}
|
||
/**
 * Checks whether `version` falls inside the supported range a particle type
 * declares through its MIN_VERSION / MAX_VERSION static fields (inclusive).
 *
 * @param {{MIN_VERSION: string, MAX_VERSION: string}} type - Particle type.
 * @param {string} version - edm4hep version string ("major.minor.patch").
 * @returns {boolean} True when the version is supported.
 */
export function compatible(type, version) {
  return new Version(version).isBetween(
    new Version(type.MIN_VERSION),
    new Version(type.MAX_VERSION)
  );
}
Oops, something went wrong.
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
TL;DR: I like this approach, but we might have to come back to it in the future to make it a bit more flexible, and I just want to plant this information in your head, so that you can also start to think about it in the background :)
Nice. I think this goes into the right direction. I have checked the handling of the versions a bit below and I think it's OK. I have one more design question here, what would happen if e.g. the
Cluster
changes in EDM4hep in version 1.2.3
(which one doesn't actually matter)? How would you then be able to work with "old" clusters and "new" clusters? Have you thought about that already? Just to give you some of the requirements here:
So what we would probably need is some way of reading the data members and relations depending on the version. One way that could be done already now would be to simply create a new
Cluster
class (with a slightly different name) and then use the mechanism that you already have in place now. The other option would be to do some dynamic handling at least of the reading part depending on the version. For displaying the potentially different information in the end, I am not yet sure how to handle this best, but for starting we could probably just have the set of all members that are in common (if there are changes).
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Right now the most simple way that I can think of, is to create multiple classes of Cluster, and choose the one that fits the version.
Well, almost all classes should have general
draw
,getData
, etc methods that each version has to implement.Yeah, exactly!
There is something similar already
But yes, thinking of a way to load the particles and manage them across dmx independently of the version is a must. I will design this.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I wasn't aware of the dynamic loading bit, thanks for pointing it out.
Another comment that adds a bit more information on how EDM4hep "behaves" (or should behave) when reading older versions. Users only ever see the latest version in memory, and also the interface they get will always be the latest version. All the schema evolution from the older version to the newer version happens before users actually see the data. That is why I would try to keep a similar behavior also here, i.e. once the data has been read the classes are at their latest version.
Having read through your comment, it might even be OK for now to just follow your approach, since
draw
andgetData
are already things that can be overriden, so we should in principle be quite flexible in case there is no immediately better design.