author | Thomas Watson | 2016-01-05 14:55:46 +0000
committer | Thomas Watson | 2016-01-05 15:29:48 +0000
commit | e1aa6950b8be7c9e76b2df623d09eb8e25d1ef97 (patch)
tree | 2ca3b6daf3d5a8980a82444bec927b21041afbc4 /bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver
parent | de43c1f9a0b333cfb78be0dc5a1844dcae29e733 (diff)
download | rt.equinox.framework-e1aa6950b8be7c9e76b2df623d09eb8e25d1ef97.tar.gz, rt.equinox.framework-e1aa6950b8be7c9e76b2df623d09eb8e25d1ef97.tar.xz, rt.equinox.framework-e1aa6950b8be7c9e76b2df623d09eb8e25d1ef97.zip
Bug 485217 - Update resolver implementation and default to using batch resolves with a timeout
Diffstat (limited to 'bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver')
17 files changed, 3673 insertions(+), 1638 deletions(-)
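The central change in Candidates.java below replaces the recursive populateResource() method, with its Object[] cycle-count cache, by an iterative populate() driven by a LinkedList worklist and a typed PopulateResult cache. The following is a minimal sketch of that worklist pattern, not the Felix implementation itself: PopulateSketch, Result, and Providers are invented names, Resource and Requirement are collapsed to plain Strings, Providers stands in for ResolveContext.findProviders(), and the real loop additionally handles effectiveness checks, optional requirements, fragments, and on-demand resources.

import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

final class PopulateSketch
{
    static final class Result
    {
        boolean success;        // populated without error
        String error;           // failure reason, if any
        List<String> remaining; // requirements still to be processed
    }

    /** Hypothetical stand-in for ResolveContext.findProviders(). */
    interface Providers
    {
        List<String> find(String requirement);
    }

    private final Map<String, Result> cache = new HashMap<String, Result>();

    // Assumes every resource that can be queued has an entry in 'requirements'.
    void populate(List<String> resources, Map<String, List<String>> requirements, Providers rc)
    {
        Deque<String> toPopulate = new ArrayDeque<String>(resources);
        while (!toPopulate.isEmpty())
        {
            String resource = toPopulate.peekFirst();
            Result result = cache.get(resource);
            if (result == null)
            {
                result = new Result();
                result.remaining = new ArrayList<String>(requirements.get(resource));
                cache.put(resource, result);
            }
            if (result.success || result.error != null)
            {
                // Already decided on an earlier pass (possibly via a cycle).
                toPopulate.removeFirst();
                continue;
            }
            if (result.remaining.isEmpty())
            {
                // Every requirement found a provider.
                result.success = true;
                toPopulate.removeFirst();
                continue;
            }
            String req = result.remaining.remove(0);
            List<String> candidates = rc.find(req);
            if (candidates.isEmpty())
            {
                result.error = "missing requirement " + req;
                toPopulate.removeFirst();
            }
            else
            {
                // Providers must be populated before the current resource, so
                // push the unvisited ones ahead of it; the resource stays queued
                // and is revisited once they are done (the real code also records
                // the candidates per requirement at this point).
                for (String provider : candidates)
                {
                    if (!cache.containsKey(provider))
                    {
                        toPopulate.addFirst(provider);
                    }
                }
            }
        }
    }
}

Keeping the in-progress resource at the head of the deque while pushing its unpopulated providers in front of it reproduces the depth-first order of the old recursion without consuming Java stack frames, and the cache makes cycles terminate because a resource already seen is never re-queued.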
diff --git a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/Candidates.java b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/Candidates.java index 9b290dfd3..bfa80c6ec 100644..100755 --- a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/Candidates.java +++ b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/Candidates.java @@ -24,15 +24,15 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; -import java.util.LinkedHashMap; +import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; -import org.apache.felix.resolver.util.CopyOnWriteSet; import org.apache.felix.resolver.util.CopyOnWriteList; +import org.apache.felix.resolver.util.CopyOnWriteSet; import org.apache.felix.resolver.util.OpenHashMap; import org.apache.felix.resolver.util.OpenHashMapList; import org.apache.felix.resolver.util.OpenHashMapSet; @@ -47,13 +47,21 @@ import org.osgi.resource.Resource; import org.osgi.resource.Wire; import org.osgi.resource.Wiring; import org.osgi.service.resolver.HostedCapability; -import org.osgi.service.resolver.ResolutionException; import org.osgi.service.resolver.ResolveContext; class Candidates { - public static final int MANDATORY = 0; - public static final int OPTIONAL = 1; + static class PopulateResult { + boolean success; + ResolutionError error; + List<Requirement> remaining; + Map<Requirement, List<Capability>> candidates; + + @Override + public String toString() { + return success ? "true" : error != null ? error.getMessage() : "???"; + } + } private final Set<Resource> m_mandatoryResources; // Maps a capability to requirements that match it. @@ -64,10 +72,7 @@ class Candidates // when a revision being resolved has fragments to attach to it. private final Map<Resource, WrappedResource> m_allWrappedHosts; // Map used when populating candidates to hold intermediate and final results. - private final Map<Resource, Object> m_populateResultCache; - - // Flag to signal if fragments are present in the candidate map. 
- private boolean m_fragmentsPresent = false; + private final OpenHashMap<Resource, PopulateResult> m_populateResultCache; private final Map<Resource, Boolean> m_validOnDemandResources; @@ -82,8 +87,8 @@ class Candidates Set<Resource> mandatoryResources, OpenHashMapSet<Capability, Requirement> dependentMap, OpenHashMapList<Requirement, Capability> candidateMap, - Map<Resource, WrappedResource> wrappedHosts, Map<Resource, Object> populateResultCache, - boolean fragmentsPresent, + Map<Resource, WrappedResource> wrappedHosts, + OpenHashMap<Resource, PopulateResult> populateResultCache, Map<Resource, Boolean> onDemandResources, Map<Capability, Requirement> substitutableMap, OpenHashMapSet<Requirement, Capability> delta) @@ -93,7 +98,6 @@ class Candidates m_candidateMap = candidateMap; m_allWrappedHosts = wrappedHosts; m_populateResultCache = populateResultCache; - m_fragmentsPresent = fragmentsPresent; m_validOnDemandResources = onDemandResources; m_subtitutableMap = substitutableMap; m_delta = delta; @@ -108,280 +112,184 @@ class Candidates m_dependentMap = new OpenHashMapSet<Capability, Requirement>(); m_candidateMap = new OpenHashMapList<Requirement, Capability>(); m_allWrappedHosts = new HashMap<Resource, WrappedResource>(); - m_populateResultCache = new LinkedHashMap<Resource, Object>(); + m_populateResultCache = new OpenHashMap<Resource, PopulateResult>(); m_validOnDemandResources = validOnDemandResources; - m_subtitutableMap = new LinkedHashMap<Capability, Requirement>(); + m_subtitutableMap = new OpenHashMap<Capability, Requirement>(); m_delta = new OpenHashMapSet<Requirement, Capability>(3); } - /** - * Returns the delta which is the differences in the candidates from the - * original Candidates permutation. - * @return the delta - */ - public Object getDelta() { - return m_delta; + public int getNbResources() + { + return m_populateResultCache.size(); } - /** - * Populates candidates for the specified revision. How a revision is - * resolved depends on its resolution type as follows: - * <ul> - * <li><tt>MANDATORY</tt> - must resolve and failure to do so throws an - * exception.</li> - * <li><tt>OPTIONAL</tt> - attempt to resolve, but no exception is thrown if - * the resolve fails.</li> - * <li><tt>ON_DEMAND</tt> - only resolve on demand; this only applies to - * fragments and will only resolve a fragment if its host is already - * selected as a candidate.</li> - * </ul> - * - * @param rc the resolve context used for populating the candidates. - * @param resource the resource whose candidates should be populated. - * @param resolution indicates the resolution type. - */ - public final void populate( - ResolveContext rc, Resource resource, int resolution) throws ResolutionException + public Map<Resource, Resource> getHosts() { - // Get the current result cache value, to make sure the revision - // hasn't already been populated. - Object cacheValue = m_populateResultCache.get(resource); - // Has been unsuccessfully populated. - if (cacheValue instanceof ResolutionException) - { - return; - } - // Has been successfully populated. - else if (cacheValue instanceof Boolean) - { - return; - } - - // We will always attempt to populate fragments, since this is necessary - // for ondemand attaching of fragment. However, we'll only attempt to - // populate optional non-fragment revisions if they aren't already - // resolved. 
- boolean isFragment = Util.isFragment(resource); - if (!isFragment && rc.getWirings().containsKey(resource)) - { - return; - } - - if (resolution == MANDATORY) - { - m_mandatoryResources.add(resource); - } - try + Map<Resource, Resource> hosts = new HashMap<Resource, Resource>(); + for (Resource res : m_mandatoryResources) { - // Try to populate candidates for the optional revision. - populateResource(rc, resource); + if (res instanceof WrappedResource) + { + res = ((WrappedResource) res).getDeclaredResource(); + } + if (!Util.isFragment(res)) + { + hosts.put(res, getWrappedHost(res)); + } } - catch (ResolutionException ex) + for (Capability cap : m_dependentMap.keySet()) { - // Only throw an exception if resolution is mandatory. - if (resolution == MANDATORY) + Resource res = cap.getResource(); + if (res instanceof WrappedResource) + { + res = ((WrappedResource) res).getDeclaredResource(); + } + if (!Util.isFragment(res)) { - throw ex; + hosts.put(res, getWrappedHost(res)); } } + return hosts; } /** - * Populates candidates for the specified revision. - * - * @param rc the resolver state used for populating the candidates. - * @param resource the revision whose candidates should be populated. + * Returns the delta which is the differences in the candidates from the + * original Candidates permutation. + * @return the delta */ -// TODO: FELIX3 - Modify to not be recursive. - @SuppressWarnings("unchecked") - private void populateResource(ResolveContext rc, Resource resource) throws ResolutionException + public Object getDelta() { - // Determine if we've already calculated this revision's candidates. - // The result cache will have one of three values: - // 1. A resolve exception if we've already attempted to populate the - // revision's candidates but were unsuccessful. - // 2. Boolean.TRUE indicating we've already attempted to populate the - // revision's candidates and were successful. - // 3. An array containing the cycle count, current map of candidates - // for already processed requirements, and a list of remaining - // requirements whose candidates still need to be calculated. - // For case 1, rethrow the exception. For case 2, simply return immediately. - // For case 3, this means we have a cycle so we should continue to populate - // the candidates where we left off and not record any results globally - // until we've popped completely out of the cycle. - - // Keeps track of the number of times we've reentered this method - // for the current revision. - Integer cycleCount = null; - - // Keeps track of the candidates we've already calculated for the - // current revision's requirements. - Map<Requirement, List<Capability>> localCandidateMap = null; - - // Keeps track of the current revision's requirements for which we - // haven't yet found candidates. - List<Requirement> remainingReqs = null; - - // Get the cache value for the current revision. - Object cacheValue = m_populateResultCache.get(resource); - - // This is case 1. - if (cacheValue instanceof ResolutionException) - { - throw (ResolutionException) cacheValue; - } - // This is case 2. - else if (cacheValue instanceof Boolean) - { - return; - } - // This is case 3. - else if (cacheValue != null) - { - // Increment and get the cycle count. - cycleCount = (Integer) (((Object[]) cacheValue)[0] = (Integer) ((Object[]) cacheValue)[0] + 1); - // Get the already populated candidates. - localCandidateMap = (Map) ((Object[]) cacheValue)[1]; - // Get the remaining requirements. 
- remainingReqs = (List) ((Object[]) cacheValue)[2]; - } - - // If there is no cache value for the current revision, then this is - // the first time we are attempting to populate its candidates, so - // do some one-time checks and initialization. - if ((remainingReqs == null) && (localCandidateMap == null)) - { - // Record cycle count. - cycleCount = 0; - - // Create a local map for populating candidates first, just in case - // the revision is not resolvable. - localCandidateMap = new HashMap<Requirement, List<Capability>>(); - - // Create a modifiable list of the revision's requirements. - remainingReqs = new ArrayList<Requirement>(resource.getRequirements(null)); + return m_delta; + } - // Add these value to the result cache so we know we are - // in the middle of populating candidates for the current - // revision. - m_populateResultCache.put(resource, - cacheValue = new Object[] { cycleCount, localCandidateMap, remainingReqs }); - } + public void addMandatoryResources(Collection<Resource> resources) + { + m_mandatoryResources.addAll(resources); + } - // If we have requirements remaining, then find candidates for them. - while (!remainingReqs.isEmpty()) + @SuppressWarnings("ThrowableResultOfMethodCallIgnored") + public ResolutionError populate(ResolveContext rc, Collection<Resource> resources) + { + Set<Resource> toRemove = new HashSet<Resource>(); + LinkedList<Resource> toPopulate = new LinkedList<Resource>(resources); + while (!toPopulate.isEmpty()) { - Requirement req = remainingReqs.remove(0); - - // Ignore non-effective and dynamic requirements. - String resolution = req.getDirectives() - .get(PackageNamespace.REQUIREMENT_RESOLUTION_DIRECTIVE); - if (!rc.isEffective(req) - || ((resolution != null) - && resolution.equals(PackageNamespace.RESOLUTION_DYNAMIC))) + Resource resource = toPopulate.getFirst(); + // Get cached result + PopulateResult result = m_populateResultCache.get(resource); + if (result == null) { - continue; + result = new PopulateResult(); + result.candidates = new OpenHashMap<Requirement, List<Capability>>(); + result.remaining = new ArrayList<Requirement>(resource.getRequirements(null)); + m_populateResultCache.put(resource, result); } - - // Process the candidates, removing any candidates that - // cannot resolve. - List<Capability> candidates = rc.findProviders(req); - ResolutionException rethrow = processCandidates(rc, resource, candidates); - - // First, due to cycles, makes sure we haven't already failed in - // a deeper recursion. - Object result = m_populateResultCache.get(resource); - if (result instanceof ResolutionException) + if (result.success || result.error != null) { - throw (ResolutionException) result; + toPopulate.removeFirst(); + continue; } - // Next, if are no candidates remaining and the requirement is not - // not optional, then record and throw a resolve exception. - else if (candidates.isEmpty() && !Util.isOptional(req)) + if (result.remaining.isEmpty()) { - if (Util.isFragment(resource) && rc.getWirings().containsKey(resource)) + toPopulate.removeFirst(); + result.success = true; + addCandidates(result.candidates); + result.candidates = null; + result.remaining = null; + if ((rc instanceof FelixResolveContext) && !Util.isFragment(resource)) { - // This is a fragment that is already resolved and there is no unresolved hosts to attach it to. 
- m_populateResultCache.put(resource, Boolean.TRUE); - return; - } - String msg = "Unable to resolve " + resource - + ": missing requirement " + req; - if (rethrow != null) - { - msg = msg + " [caused by: " + rethrow.getMessage() + "]"; + Collection<Resource> ondemandFragments = ((FelixResolveContext) rc).getOndemandResources(resource); + for (Resource fragment : ondemandFragments) + { + Boolean valid = m_validOnDemandResources.get(fragment); + if (valid == null) + { + // Mark this resource as a valid on demand resource + m_validOnDemandResources.put(fragment, Boolean.TRUE); + valid = Boolean.TRUE; + } + if (valid) + { + // This resource is a valid on demand resource; + // populate it now, consider it optional + toPopulate.addFirst(fragment); + } + } } - rethrow = new ResolutionException(msg, null, Collections.singleton(req)); - m_populateResultCache.put(resource, rethrow); - throw rethrow; + continue; } - // Otherwise, if we actually have candidates for the requirement, then - // add them to the local candidate map. - else if (candidates.size() > 0) + // We have a requirement to process + Requirement requirement = result.remaining.remove(0); + if (!isEffective(rc, requirement)) { - localCandidateMap.put(req, candidates); + continue; } - } - - // If we are exiting from a cycle then decrement - // cycle counter, otherwise record the result. - if (cycleCount > 0) - { - ((Object[]) cacheValue)[0] = cycleCount - 1; - } - else if (cycleCount == 0) - { - // Record that the revision was successfully populated. - m_populateResultCache.put(resource, Boolean.TRUE); - // FELIX-4825: Verify candidate map in case of cycles and optional requirements - for (Iterator<Map.Entry<Requirement, List<Capability>>> it = localCandidateMap.entrySet().iterator(); it.hasNext();) + List<Capability> candidates = rc.findProviders(requirement); + LinkedList<Resource> newToPopulate = new LinkedList<Resource>(); + ResolutionError thrown = processCandidates(rc, newToPopulate, requirement, candidates); + if (candidates.isEmpty() && !Util.isOptional(requirement)) { - Map.Entry<Requirement, List<Capability>> entry = it.next(); - for (Iterator<Capability> it2 = entry.getValue().iterator(); it2.hasNext();) + if (Util.isFragment(resource) && rc.getWirings().containsKey(resource)) { - if (m_populateResultCache.get(it2.next().getResource()) instanceof ResolutionException) - { - it2.remove(); - } + // This is a fragment that is already resolved and there is no unresolved hosts to attach it to. + result.success = true; } - if (entry.getValue().isEmpty()) + else { - it.remove(); + result.error = new MissingRequirementError(requirement, thrown); + toRemove.add(resource); } + toPopulate.removeFirst(); } - // Merge local candidate map into global candidate map. 
- if (localCandidateMap.size() > 0) - { - add(localCandidateMap); - } - if ((rc instanceof FelixResolveContext) && !Util.isFragment(resource)) + else { - Collection<Resource> ondemandFragments = ((FelixResolveContext) rc).getOndemandResources(resource); - for (Resource fragment : ondemandFragments) + if (!candidates.isEmpty()) { - Boolean valid = m_validOnDemandResources.get(fragment); - if (valid == null) - { - // Mark this resource as a valid on demand resource - m_validOnDemandResources.put(fragment, Boolean.TRUE); - valid = Boolean.TRUE; - } - if (valid) - { - // This resource is a valid on demand resource; - // populate it now, consider it optional - populate(rc, fragment, OPTIONAL); - } + result.candidates.put(requirement, candidates); + } + if (!newToPopulate.isEmpty()) + { + toPopulate.addAll(0, newToPopulate); } } } + + while (!toRemove.isEmpty()) + { + Iterator<Resource> iterator = toRemove.iterator(); + Resource resource = iterator.next(); + iterator.remove(); + remove(resource, toRemove); + } + return null; + } + + private boolean isEffective(ResolveContext rc, Requirement req) { + if (!rc.isEffective(req)) { + return false; + } + String res = req.getDirectives().get(PackageNamespace.REQUIREMENT_RESOLUTION_DIRECTIVE); + return !PackageNamespace.RESOLUTION_DYNAMIC.equals(res); + } + + private boolean isMandatory(ResolveContext rc, Requirement requirement) { + // The requirement is optional + if (Util.isOptional(requirement)) { + return false; + } + // This is a fragment that is already resolved and there is no unresolved hosts to attach it to + Resource resource = requirement.getResource(); + if (Util.isFragment(resource) && rc.getWirings().containsKey(resource)) { + return false; + } + return true; } private void populateSubstitutables() { - for (Map.Entry<Resource, Object> populated : m_populateResultCache.entrySet()) + for (Map.Entry<Resource, PopulateResult> populated : m_populateResultCache.fast()) { - if (populated.getValue() instanceof Boolean) + if (populated.getValue().success) { populateSubstitutables(populated.getKey()); } @@ -391,46 +299,43 @@ class Candidates private void populateSubstitutables(Resource resource) { // Collect the package names exported - List<Capability> packageExports = resource.getCapabilities(PackageNamespace.PACKAGE_NAMESPACE); - if (packageExports.isEmpty()) + OpenHashMap<String, List<Capability>> exportNames = new OpenHashMap<String, List<Capability>>() { + @Override + protected List<Capability> compute(String s) { + return new ArrayList<Capability>(1); + } + }; + for (Capability packageExport : resource.getCapabilities(null)) { - return; + if (!PackageNamespace.PACKAGE_NAMESPACE.equals(packageExport.getNamespace())) + { + continue; + } + String packageName = (String) packageExport.getAttributes().get(PackageNamespace.PACKAGE_NAMESPACE); + List<Capability> caps = exportNames.getOrCompute(packageName); + caps.add(packageExport); } - List<Requirement> packageImports = resource.getRequirements(PackageNamespace.PACKAGE_NAMESPACE); - if (packageImports.isEmpty()) + if (exportNames.isEmpty()) { return; } - Map<String, List<Capability>> exportNames = new LinkedHashMap<String, List<Capability>>(); - for (Capability packageExport : packageExports) + // Check if any requirements substitute one of the exported packages + for (Requirement req : resource.getRequirements(null)) { - String packageName = (String) packageExport.getAttributes().get(PackageNamespace.PACKAGE_NAMESPACE); - List<Capability> caps = exportNames.get(packageName); - if (caps == null) + 
if (!PackageNamespace.PACKAGE_NAMESPACE.equals(req.getNamespace())) { - caps = new ArrayList<Capability>(1); - exportNames.put(packageName, caps); + continue; } - caps.add(packageExport); - } - // Check if any requirements substitute one of the exported packages - for (Requirement req : packageImports) - { List<Capability> substitutes = m_candidateMap.get(req); if (substitutes != null && !substitutes.isEmpty()) { - String packageName = (String) substitutes.iterator().next().getAttributes().get(PackageNamespace.PACKAGE_NAMESPACE); + String packageName = (String) substitutes.get(0).getAttributes().get(PackageNamespace.PACKAGE_NAMESPACE); List<Capability> exportedPackages = exportNames.get(packageName); if (exportedPackages != null) { // The package is exported; // Check if the requirement only has the bundle's own export as candidates - substitutes = new ArrayList<Capability>(substitutes); - for (Capability exportedPackage : exportedPackages) - { - substitutes.remove(exportedPackage); - } - if (!substitutes.isEmpty()) + if (!exportedPackages.containsAll(substitutes)) { for (Capability exportedPackage : exportedPackages) { @@ -447,9 +352,9 @@ class Candidates private static final int SUBSTITUTED = 2; private static final int EXPORTED = 3; - void checkSubstitutes(List<Candidates> importPermutations) throws ResolutionException + ResolutionError checkSubstitutes(List<Candidates> importPermutations) { - Map<Capability, Integer> substituteStatuses = new LinkedHashMap<Capability, Integer>(m_subtitutableMap.size()); + OpenHashMap<Capability, Integer> substituteStatuses = new OpenHashMap<Capability, Integer>(m_subtitutableMap.size()); for (Capability substitutable : m_subtitutableMap.keySet()) { // initialize with unprocessed @@ -462,16 +367,8 @@ class Candidates } // Remove any substituted exports from candidates - for (Map.Entry<Capability, Integer> substituteStatus : substituteStatuses.entrySet()) + for (Map.Entry<Capability, Integer> substituteStatus : substituteStatuses.fast()) { - if (substituteStatus.getValue() == SUBSTITUTED) - { - if (m_dependentMap.isEmpty()) - { - // make sure the dependents are populated - populateDependents(); - } - } // add a permutation that imports a different candidate for the substituted if possible Requirement substitutedReq = m_subtitutableMap.get(substituteStatus.getKey()); if (substitutedReq != null) @@ -516,15 +413,14 @@ class Candidates } else { - String msg = "Unable to resolve " + dependent.getResource() - + ": missing requirement " + dependent; - throw new ResolutionException(msg, null, Collections.singleton(dependent)); + return new MissingRequirementError(dependent); } } } } } } + return null; } private boolean isSubstituted(Capability substitutableCap, Map<Capability, Integer> substituteStatuses) @@ -581,9 +477,9 @@ class Candidates return false; } - public void populateDynamic( + public ResolutionError populateDynamic( ResolveContext rc, Resource resource, - Requirement req, List<Capability> candidates) throws ResolutionException + Requirement req, List<Capability> candidates) { // Record the revision associated with the dynamic require // as a mandatory revision. @@ -591,45 +487,50 @@ class Candidates // Process the candidates, removing any candidates that // cannot resolve. 
- ResolutionException rethrow = processCandidates(rc, resource, candidates); + // TODO: verify the two following statements + LinkedList<Resource> toPopulate = new LinkedList<Resource>(); + ResolutionError rethrow = processCandidates(rc, toPopulate, req, candidates); // Add the dynamic imports candidates. // Make sure this is done after the call to processCandidates since we want to ensure // fragment candidates are properly hosted before adding the candidates list which makes a copy - add(req, candidates); + addCandidates(req, candidates); + + populate(rc, toPopulate); + + CopyOnWriteList<Capability> caps = m_candidateMap.get(req); + if (caps != null) + { + candidates.retainAll(caps); + } + else + { + candidates.clear(); + } if (candidates.isEmpty()) { if (rethrow == null) { - rethrow = new ResolutionException( - "Dynamic import failed.", null, Collections.singleton(req)); + rethrow = new DynamicImportFailed(req); } - throw rethrow; + return rethrow; } - m_populateResultCache.put(resource, Boolean.TRUE); + PopulateResult result = new PopulateResult(); + result.success = true; + m_populateResultCache.put(resource, result); + return null; } - /** - * This method performs common processing on the given set of candidates. - * Specifically, it removes any candidates which cannot resolve and it - * synthesizes candidates for any candidates coming from any attached - * fragments, since fragment capabilities only appear once, but technically - * each host represents a unique capability. - * - * @param rc the resolver state. - * @param resource the revision being resolved. - * @param candidates the candidates to process. - * @return a resolve exception to be re-thrown, if any, or null. - */ - private ResolutionException processCandidates( + private ResolutionError processCandidates( ResolveContext rc, - Resource resource, + LinkedList<Resource> toPopulate, + Requirement req, List<Capability> candidates) { // Get satisfying candidates and populate their candidates if necessary. - ResolutionException rethrow = null; + ResolutionError rethrow = null; Set<Capability> fragmentCands = null; for (Iterator<Capability> itCandCap = candidates.iterator(); itCandCap.hasNext();) @@ -663,21 +564,29 @@ class Candidates // of recursion; thus, any avoided recursion results in fewer // exceptions to chain when an error does occur. if ((isFragment || !rc.getWirings().containsKey(candCap.getResource())) - && !candCap.getResource().equals(resource)) + && !candCap.getResource().equals(req.getResource())) { - try - { - populateResource(rc, candCap.getResource()); - } - catch (ResolutionException ex) + PopulateResult result = m_populateResultCache.get(candCap.getResource()); + if (result != null) { - if (rethrow == null) + if (result.error != null) + { + if (rethrow == null) + { + rethrow = result.error; + } + // Remove the candidate since we weren't able to + // populate its candidates. + itCandCap.remove(); + } + else if (!result.success) { - rethrow = ex; + toPopulate.add(candCap.getResource()); } - // Remove the candidate since we weren't able to - // populate its candidates. 
- itCandCap.remove(); + } + else + { + toPopulate.add(candCap.getResource()); } } } @@ -743,15 +652,14 @@ class Candidates public boolean isPopulated(Resource resource) { - Object value = m_populateResultCache.get(resource); - return ((value != null) && (value instanceof Boolean)); + PopulateResult value = m_populateResultCache.get(resource); + return (value != null && value.success); } - public ResolutionException getResolveException(Resource resource) + public ResolutionError getResolutionError(Resource resource) { - Object value = m_populateResultCache.get(resource); - return ((value != null) && (value instanceof ResolutionException)) - ? (ResolutionException) value : null; + PopulateResult value = m_populateResultCache.get(resource); + return value != null ? value.error : null; } /** @@ -764,15 +672,14 @@ class Candidates * @param req the requirement to add. * @param candidates the candidates matching the requirement. */ - private void add(Requirement req, List<Capability> candidates) + private void addCandidates(Requirement req, List<Capability> candidates) { - if (req.getNamespace().equals(HostNamespace.HOST_NAMESPACE)) - { - m_fragmentsPresent = true; - } - // Record the candidates. m_candidateMap.put(req, new CopyOnWriteList<Capability>(candidates)); + for (Capability cap : candidates) + { + m_dependentMap.getOrCompute(cap).add(req); + } } /** @@ -782,11 +689,11 @@ class Candidates * * @param candidates the bulk requirements and candidates to add. */ - private void add(Map<Requirement, List<Capability>> candidates) + private void addCandidates(Map<Requirement, List<Capability>> candidates) { - for (Entry<Requirement, List<Capability>> entry : candidates.entrySet()) + for (Map.Entry<Requirement, List<Capability>> entry : candidates.entrySet()) { - add(entry.getKey(), entry.getValue()); + addCandidates(entry.getKey(), entry.getValue()); } } @@ -827,7 +734,7 @@ class Candidates List<Capability> candidates = m_candidateMap.get(req); if (candidates != null && !candidates.isEmpty()) { - return m_candidateMap.get(req).get(0); + return candidates.get(0); } return null; } @@ -842,11 +749,7 @@ class Candidates m_candidateMap.remove(req); } // Update the delta with the removed capability - CopyOnWriteSet<Capability> capPath = m_delta.get(req); - if (capPath == null) { - capPath = new CopyOnWriteSet<Capability>(); - m_delta.put(req, capPath); - } + CopyOnWriteSet<Capability> capPath = m_delta.getOrCompute(req); capPath.add(cap); } @@ -855,11 +758,7 @@ class Candidates List<Capability> l = m_candidateMap.get(req); l.removeAll(caps); // Update candidates delta with the removed capabilities. - CopyOnWriteSet<Capability> capPath = m_delta.get(req); - if (capPath == null) { - capPath = new CopyOnWriteSet<Capability>(); - m_delta.put(req, capPath); - } + CopyOnWriteSet<Capability> capPath = m_delta.getOrCompute(req); capPath.addAll(caps); return l; } @@ -878,20 +777,17 @@ class Candidates * satisfied by the fragment will end up having the two hosts as potential * candidates, rather than the single fragment. * - * @throws org.osgi.service.resolver.ResolutionException if the removal of any unselected fragments + * @return ResolutionError if the removal of any unselected fragments * result in the root module being unable to resolve. 
*/ - public void prepare(ResolveContext rc) throws ResolutionException + public ResolutionError prepare(ResolveContext rc) { // Maps a host capability to a map containing its potential fragments; // the fragment map maps a fragment symbolic name to a map that maps // a version to a list of fragments requirements matching that symbolic // name and version. - Map<Capability, Map<String, Map<Version, List<Requirement>>>> hostFragments = Collections.emptyMap(); - if (m_fragmentsPresent) - { - hostFragments = populateDependents(); - } + Map<Capability, Map<String, Map<Version, List<Requirement>>>> hostFragments = + getHostFragments(); // This method performs the following steps: // 1. Select the fragments to attach to a given host. @@ -962,9 +858,7 @@ class Candidates // Step 3 for (Resource fragment : unselectedFragments) { - removeResource(fragment, - new ResolutionException( - "Fragment was not selected for attachment: " + fragment)); + removeResource(fragment, new FragmentNotSelectedError(fragment)); } // Step 4 @@ -1081,39 +975,32 @@ class Candidates { if (!isPopulated(resource)) { - throw getResolveException(resource); + return getResolutionError(resource); } } populateSubstitutables(); - m_candidateMap.concat(); - m_dependentMap.concat(); + m_candidateMap.trim(); + m_dependentMap.trim(); + + return null; } // Maps a host capability to a map containing its potential fragments; // the fragment map maps a fragment symbolic name to a map that maps // a version to a list of fragments requirements matching that symbolic // name and version. - private Map<Capability, Map<String, Map<Version, List<Requirement>>>> populateDependents() + private Map<Capability, Map<String, Map<Version, List<Requirement>>>> getHostFragments() { Map<Capability, Map<String, Map<Version, List<Requirement>>>> hostFragments = new HashMap<Capability, Map<String, Map<Version, List<Requirement>>>>(); - for (Entry<Requirement, CopyOnWriteList<Capability>> entry : m_candidateMap.entrySet()) + for (Entry<Requirement, CopyOnWriteList<Capability>> entry : m_candidateMap.fast()) { Requirement req = entry.getKey(); List<Capability> caps = entry.getValue(); for (Capability cap : caps) { - // Record the requirement as dependent on the capability. - CopyOnWriteSet<Requirement> dependents = m_dependentMap.get(cap); - if (dependents == null) - { - dependents = new CopyOnWriteSet<Requirement>(); - m_dependentMap.put(cap, dependents); - } - dependents.add(req); - // Keep track of hosts and associated fragments. if (req.getNamespace().equals(HostNamespace.HOST_NAMESPACE)) { @@ -1156,14 +1043,14 @@ class Candidates * is no other candidate. * * @param resource the module to remove. - * @throws ResolutionException if removing the module caused the resolve to - * fail. + * @param ex the resolution error */ - private void removeResource(Resource resource, ResolutionException ex) - throws ResolutionException + private void removeResource(Resource resource, ResolutionError ex) { // Add removal reason to result cache. - m_populateResultCache.put(resource, ex); + PopulateResult result = m_populateResultCache.get(resource); + result.success = false; + result.error = ex; // Remove from dependents. Set<Resource> unresolvedResources = new HashSet<Resource>(); remove(resource, unresolvedResources); @@ -1186,11 +1073,8 @@ class Candidates * @param unresolvedResources a list to containing any additional modules * that that became unresolved as a result of removing this module and will * also need to be removed. 
- * @throws ResolutionException if removing the module caused the resolve to - * fail. */ private void remove(Resource resource, Set<Resource> unresolvedResources) - throws ResolutionException { for (Requirement r : resource.getRequirements(null)) { @@ -1232,11 +1116,8 @@ class Candidates * @param unresolvedResources a list to containing any additional modules * that that became unresolved as a result of removing this module and will * also need to be removed. - * @throws ResolutionException if removing the module caused the resolve to - * fail. */ private void remove(Capability c, Set<Resource> unresolvedResources) - throws ResolutionException { Set<Requirement> dependents = m_dependentMap.remove(c); if (dependents != null) @@ -1250,11 +1131,13 @@ class Candidates m_candidateMap.remove(r); if (!Util.isOptional(r)) { - String msg = "Unable to resolve " + r.getResource() - + ": missing requirement " + r; - m_populateResultCache.put( - r.getResource(), - new ResolutionException(msg, null, Collections.singleton(r))); + PopulateResult result = m_populateResultCache.get(r.getResource()); + if (result != null) + { + result.success = false; + result.error = + new MissingRequirementError(r, m_populateResultCache.get(c.getResource()).error); + } unresolvedResources.add(r.getResource()); } } @@ -1276,7 +1159,6 @@ class Candidates m_candidateMap.deepClone(), m_allWrappedHosts, m_populateResultCache, - m_fragmentsPresent, m_validOnDemandResources, m_subtitutableMap, m_delta.deepClone()); @@ -1370,4 +1252,66 @@ class Candidates } } + static class DynamicImportFailed extends ResolutionError { + + private final Requirement requirement; + + public DynamicImportFailed(Requirement requirement) { + this.requirement = requirement; + } + + public String getMessage() { + return "Dynamic import failed."; + } + + public Collection<Requirement> getUnresolvedRequirements() { + return Collections.singleton(requirement); + } + + } + + static class FragmentNotSelectedError extends ResolutionError { + + private final Resource resource; + + public FragmentNotSelectedError(Resource resource) { + this.resource = resource; + } + + public String getMessage() { + return "Fragment was not selected for attachment: " + resource; + } + + } + + static class MissingRequirementError extends ResolutionError { + + private final Requirement requirement; + private final ResolutionError cause; + + public MissingRequirementError(Requirement requirement) { + this(requirement, null); + } + + public MissingRequirementError(Requirement requirement, ResolutionError cause) { + this.requirement = requirement; + this.cause = cause; + } + + public String getMessage() { + String msg = "Unable to resolve " + requirement.getResource() + + ": missing requirement " + requirement; + if (cause != null) + { + msg = msg + " [caused by: " + cause.getMessage() + "]"; + } + return msg; + } + + public Collection<Requirement> getUnresolvedRequirements() { + return Collections.singleton(requirement); + } + + } + } diff --git a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/FelixResolveContext.java b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/FelixResolveContext.java index 502a7e459..502a7e459 100644..100755 --- a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/FelixResolveContext.java +++ b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/FelixResolveContext.java diff --git a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/Logger.java 
b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/Logger.java index fd3a8a751..d8c1b3a80 100644..100755 --- a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/Logger.java +++ b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/Logger.java @@ -75,14 +75,23 @@ public class Logger _log(level, msg, throwable); } + public boolean isDebugEnabled() + { + return m_logLevel >= LOG_DEBUG; + } + + public final void debug(String msg) + { + _log(LOG_DEBUG, msg, null); + } + protected void doLog(int level, String msg, Throwable throwable) { if (level > m_logLevel) { return; } - String s = ""; - s = s + msg; + String s = msg; if (throwable != null) { s = s + " (" + throwable + ")"; @@ -120,7 +129,7 @@ public class Logger } } - public void logUsesConstraintViolation(Resource resource, ResolutionException error) + public void logUsesConstraintViolation(Resource resource, ResolutionError error) { // do nothing by default } diff --git a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/ResolutionError.java b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/ResolutionError.java new file mode 100755 index 000000000..04138392f --- /dev/null +++ b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/ResolutionError.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.felix.resolver; + +import org.osgi.resource.Requirement; +import org.osgi.service.resolver.ResolutionException; + +import java.util.Collection; +import java.util.Collections; + +/** + * Resolution error. + * This class contains all the needed information to build the ResolutionException + * without the need to actually compute a user friendly message, which can be + * quite time consuming. 
+ */ +public abstract class ResolutionError { + + public abstract String getMessage(); + + public Collection<Requirement> getUnresolvedRequirements() { + return Collections.emptyList(); + } + + public ResolutionException toException() { + return new ResolutionException(getMessage(), null, getUnresolvedRequirements()); + } + + @Override + public String toString() { + return getMessage(); + } +} diff --git a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/ResolverImpl.java b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/ResolverImpl.java index 2264826a8..ded683fd7 100644..100755 --- a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/ResolverImpl.java +++ b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/ResolverImpl.java @@ -18,19 +18,28 @@ */ package org.apache.felix.resolver; +import java.security.AccessControlContext; +import java.security.AccessController; +import java.security.PrivilegedAction; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; - +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; +import java.util.concurrent.Executor; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.felix.resolver.util.ArrayMap; +import org.apache.felix.resolver.util.OpenHashMap; import org.osgi.framework.namespace.BundleNamespace; import org.osgi.framework.namespace.ExecutionEnvironmentNamespace; import org.osgi.framework.namespace.HostNamespace; @@ -49,8 +58,17 @@ import org.osgi.service.resolver.Resolver; public class ResolverImpl implements Resolver { + private final AccessControlContext m_acc = + System.getSecurityManager() != null ? + AccessController.getContext() : + null; + private final Logger m_logger; + private final int m_parallelism; + + private final Executor m_executor; + // Note this class is not thread safe. // Only use in the context of a single thread. 
class ResolveSession @@ -69,25 +87,13 @@ public class ResolverImpl implements Resolver // removed the offending capabilities private Candidates m_multipleCardCandidates = null; - private final Map<Capability, Set<Capability>> m_packageSourcesCache = new HashMap<Capability, Set<Capability>>(256); - - private final Map<String, List<String>> m_usesCache = new HashMap<String, List<String>>(); + private final ConcurrentMap<String, List<String>> m_usesCache = new ConcurrentHashMap<String, List<String>>(); ResolveSession(ResolveContext resolveContext) { m_resolveContext = resolveContext; } - List<Candidates> getUsesPermutations() - { - return m_usesPermutations; - } - - List<Candidates> getImportPermutations() - { - return m_importPermutations; - } - Candidates getMultipleCardCandidates() { return m_multipleCardCandidates; @@ -98,33 +104,87 @@ public class ResolverImpl implements Resolver m_multipleCardCandidates = multipleCardCandidates; } - Map<Capability, Set<Capability>> getPackageSourcesCache() - { - return m_packageSourcesCache; - } - ResolveContext getContext() { return m_resolveContext; } - public Map<String, List<String>> getUsesCache() { + public ConcurrentMap<String, List<String>> getUsesCache() { return m_usesCache; } } public ResolverImpl(Logger logger) { - m_logger = logger; + this(logger, Runtime.getRuntime().availableProcessors()); + } + + public ResolverImpl(Logger logger, int parallelism) + { + this.m_logger = logger; + this.m_parallelism = parallelism; + this.m_executor = null; + } + + public ResolverImpl(Logger logger, Executor executor) + { + this.m_logger = logger; + this.m_parallelism = -1; + this.m_executor = executor; } public Map<Resource, List<Wire>> resolve(ResolveContext rc) throws ResolutionException { + if (m_executor != null) + { + return resolve(rc, m_executor); + } + else if (m_parallelism > 1) + { + final ExecutorService executor = + System.getSecurityManager() != null ? + AccessController.doPrivileged( + new PrivilegedAction<ExecutorService>() + { + public ExecutorService run() + { + return Executors.newFixedThreadPool(m_parallelism); + } + }, m_acc) + : + Executors.newFixedThreadPool(m_parallelism); + try + { + return resolve(rc, executor); + } + finally + { + if (System.getSecurityManager() != null) + { + AccessController.doPrivileged(new PrivilegedAction<Void>(){ + public Void run() { + executor.shutdownNow(); + return null; + } + }, m_acc); + } + else + { + executor.shutdownNow(); + } + } + } + else + { + return resolve(rc, new DumbExecutor()); + } + } + + public Map<Resource, List<Wire>> resolve(ResolveContext rc, Executor executor) throws ResolutionException + { ResolveSession session = new ResolveSession(rc); Map<Resource, List<Wire>> wireMap = new HashMap<Resource, List<Wire>>(); - Map<Resource, Packages> resourcePkgMap = - new HashMap<Resource, Packages>(); // Make copies of arguments in case we want to modify them. Collection<Resource> mandatoryResources = new ArrayList<Resource>(rc.getMandatoryResources()); @@ -142,35 +202,38 @@ public class ResolverImpl implements Resolver // Create object to hold all candidates. Candidates allCandidates = new Candidates(validOnDemandResources); + List<Resource> mandatory = new ArrayList<Resource>(); + List<Resource> toPopulate = new ArrayList<Resource>(); + // Populate mandatory resources; since these are mandatory // resources, failure throws a resolve exception. 
- for (Iterator<Resource> it = mandatoryResources.iterator(); - it.hasNext();) + for (Resource resource : mandatoryResources) { - Resource resource = it.next(); if (Util.isFragment(resource) || (rc.getWirings().get(resource) == null)) { - allCandidates.populate(rc, resource, Candidates.MANDATORY); - } - else - { - it.remove(); + mandatory.add(resource); + toPopulate.add(resource); } } - // Populate optional resources; since these are optional // resources, failure does not throw a resolve exception. for (Resource resource : optionalResources) { - boolean isFragment = Util.isFragment(resource); - if (isFragment || (rc.getWirings().get(resource) == null)) + if (Util.isFragment(resource) || (rc.getWirings().get(resource) == null)) { - allCandidates.populate(rc, resource, Candidates.OPTIONAL); + toPopulate.add(resource); } } + allCandidates.addMandatoryResources(mandatory); + allCandidates.populate(rc, toPopulate); + // Merge any fragments into hosts. - allCandidates.prepare(rc); + ResolutionError rethrow = allCandidates.prepare(rc); + if (rethrow != null) + { + throw rethrow.toException(); + } // Create a combined list of populated resources; for // optional resources. We do not need to consider ondemand @@ -186,14 +249,18 @@ public class ResolverImpl implements Resolver } } - List<Candidates> usesPermutations = session.getUsesPermutations(); - List<Candidates> importPermutations = session.getImportPermutations(); + // Holds candidate permutations based on permutating "uses" chains. + // These permutations are given higher priority. + List<Candidates> usesPermutations = new ArrayList<Candidates>(); + // Holds candidate permutations based on permutating requirement candidates. + // These permutations represent backtracking on previous decisions. + List<Candidates> importPermutations = new ArrayList<Candidates>(); + // Holds candidate permutations based on substituted packages + List<Candidates> substPermutations = new ArrayList<Candidates>(); // Record the initial candidate permutation. usesPermutations.add(allCandidates); - ResolutionException rethrow = null; - // If a populated resource is a fragment, then its host // must ultimately be verified, so store its host requirement // to use for package space calculation. @@ -209,18 +276,26 @@ public class ResolverImpl implements Resolver } Set<Object> processedDeltas = new HashSet<Object>(); - Map<Resource, ResolutionException> faultyResources = null; + Map<Resource, ResolutionError> faultyResources = null; do { - allCandidates = (usesPermutations.size() > 0) - ? usesPermutations.remove(0) - : (importPermutations.size() > 0 - ? importPermutations.remove(0) - : null); - if (allCandidates == null) + if (!usesPermutations.isEmpty()) + { + allCandidates = usesPermutations.remove(0); + } + else if (!importPermutations.isEmpty()) + { + allCandidates = importPermutations.remove(0); + } + else if (!substPermutations.isEmpty()) + { + allCandidates = substPermutations.remove(0); + } + else { break; } + // The delta is used to detect that we have already processed this particular permutation if (!processedDeltas.add(allCandidates.getDelta())) { @@ -229,10 +304,6 @@ public class ResolverImpl implements Resolver continue; } - rethrow = null; - - resourcePkgMap.clear(); - session.getPackageSourcesCache().clear(); // Null out each time a new permutation is attempted. // We only use this to store a valid permutation which is a // delta of the current permutation. 
@@ -240,26 +311,16 @@ public class ResolverImpl implements Resolver //allCandidates.dump(); - Map<Resource, ResolutionException> currentFaultyResources = null; - try - { - allCandidates.checkSubstitutes(importPermutations); - } - catch (ResolutionException e) + rethrow = allCandidates.checkSubstitutes(substPermutations); + if (rethrow != null) { - rethrow = e; continue; } - // Reuse a resultCache map for checking package consistency - // for all resources. - Map<Resource, Object> resultCache = - new HashMap<Resource, Object>(allResources.size()); - // Check the package space consistency for all 'root' resources. + // Compute the list of hosts + Map<Resource, Resource> hosts = new OpenHashMap<Resource, Resource>(); for (Resource resource : allResources) { - Resource target = resource; - // If we are resolving a fragment, then get its // host candidate and verify it instead. Requirement hostReq = hostReqs.get(resource); @@ -273,46 +334,30 @@ public class ResolverImpl implements Resolver { continue; } - target = hostCap.getResource(); + resource = hostCap.getResource(); } + hosts.put(resource, allCandidates.getWrappedHost(resource)); + } - calculatePackageSpaces( - session, allCandidates.getWrappedHost(target), allCandidates, - resourcePkgMap, new HashMap<Capability, Set<Resource>>(256), - new HashSet<Resource>(64)); -//System.out.println("+++ PACKAGE SPACES START +++"); -//dumpResourcePkgMap(resourcePkgMap); -//System.out.println("+++ PACKAGE SPACES END +++"); + Map<Resource, ResolutionError> currentFaultyResources = new HashMap<Resource, ResolutionError>(); - try - { - checkPackageSpaceConsistency( - session, allCandidates.getWrappedHost(target), - allCandidates, resourcePkgMap, resultCache); - } - catch (ResolutionException ex) - { - rethrow = ex; - if (currentFaultyResources == null) - { - currentFaultyResources = new HashMap<Resource, ResolutionException>(); - } - Resource faultyResource = resource; - // check that the faulty requirement is not from a fragment - for (Requirement faultyReq : ex.getUnresolvedRequirements()) - { - if (faultyReq instanceof WrappedRequirement) - { - faultyResource = - ((WrappedRequirement) faultyReq) - .getDeclaredRequirement().getResource(); - break; - } - } - currentFaultyResources.put(faultyResource, ex); - } - } - if (currentFaultyResources != null) + List<Candidates> newUses = new ArrayList<Candidates>(); + List<Candidates> newImports = new ArrayList<Candidates>(); + + rethrow = checkConsistency( + executor, + session, + newUses, + newImports, + allCandidates, + currentFaultyResources, + hosts, + false); + + usesPermutations.addAll(0, newUses); + importPermutations.addAll(0, newImports); + + if (!currentFaultyResources.isEmpty()) { if (faultyResources == null) { @@ -349,14 +394,14 @@ public class ResolverImpl implements Resolver } } // log all the resolution exceptions for the uses constraint violations - for (Map.Entry<Resource, ResolutionException> usesError : faultyResources.entrySet()) + for (Map.Entry<Resource, ResolutionError> usesError : faultyResources.entrySet()) { m_logger.logUsesConstraintViolation(usesError.getKey(), usesError.getValue()); } } if (!retry) { - throw rethrow; + throw rethrow.toException(); } } // If there is no exception to rethrow, then this was a clean @@ -395,7 +440,7 @@ public class ResolverImpl implements Resolver wireMap = populateWireMap( rc, allCandidates.getWrappedHost(target), - resourcePkgMap, wireMap, allCandidates); + wireMap, allCandidates); } } } @@ -403,11 +448,7 @@ public class ResolverImpl implements Resolver 
finally { // Always clear the state. - session.getUsesPermutations().clear(); - session.getImportPermutations().clear(); session.setMultipleCardCandidates(null); - // TODO this was not cleared out before; but it seems it should be - session.getPackageSourcesCache().clear(); } } while (retry); @@ -415,6 +456,49 @@ public class ResolverImpl implements Resolver return wireMap; } + private ResolutionError checkConsistency( + Executor executor, + ResolveSession session, + List<Candidates> usesPermutations, + List<Candidates> importPermutations, + Candidates allCandidates, + Map<Resource, ResolutionError> currentFaultyResources, + Map<Resource, Resource> hosts, + boolean dynamic) + { + // Calculate package spaces + Map<Resource, Packages> resourcePkgMap = + calculatePackageSpaces(executor, session, allCandidates, hosts.values()); + ResolutionError error = null; + // Check package consistency + Map<Resource, Object> resultCache = + new OpenHashMap<Resource, Object>(resourcePkgMap.size()); + for (Entry<Resource, Resource> entry : hosts.entrySet()) + { + ResolutionError rethrow = checkPackageSpaceConsistency( + session, usesPermutations, importPermutations, entry.getValue(), + allCandidates, dynamic, resourcePkgMap, resultCache); + if (rethrow != null) + { + Resource faultyResource = entry.getKey(); + // check that the faulty requirement is not from a fragment + for (Requirement faultyReq : rethrow.getUnresolvedRequirements()) + { + if (faultyReq instanceof WrappedRequirement) + { + faultyResource = + ((WrappedRequirement) faultyReq) + .getDeclaredRequirement().getResource(); + break; + } + } + currentFaultyResources.put(faultyResource, rethrow); + error = rethrow; + } + } + return error; + } + /** * Resolves a dynamic requirement for the specified host resource using the * specified {@link ResolveContext}. The dynamic requirement may contain @@ -479,62 +563,47 @@ public class ResolverImpl implements Resolver // Create all candidates pre-populated with the single candidate set // for the resolving dynamic import of the host. Candidates allCandidates = new Candidates(onDemandResources); - allCandidates.populateDynamic(rc, host, dynamicReq, matches); - // Merge any fragments into hosts. - allCandidates.prepare(rc); + ResolutionError rethrow = allCandidates.populateDynamic(rc, host, dynamicReq, matches); + if (rethrow == null) + { + // Merge any fragments into hosts. + rethrow = allCandidates.prepare(rc); + } + if (rethrow != null) + { + throw rethrow.toException(); + } - List<Candidates> usesPermutations = session.getUsesPermutations(); - List<Candidates> importPermutations = session.getImportPermutations(); + List<Candidates> usesPermutations = new ArrayList<Candidates>(); + List<Candidates> importPermutations = new ArrayList<Candidates>(); // Record the initial candidate permutation. usesPermutations.add(allCandidates); - ResolutionException rethrow; - do { - rethrow = null; - resourcePkgMap.clear(); - session.getPackageSourcesCache().clear(); allCandidates = (usesPermutations.size() > 0) ? usesPermutations.remove(0) : importPermutations.remove(0); //allCandidates.dump(); - try - { - allCandidates.checkSubstitutes(importPermutations); - } - catch (ResolutionException e) + rethrow = allCandidates.checkSubstitutes(importPermutations); + if (rethrow != null) { - rethrow = e; continue; } // For a dynamic import, the instigating resource // will never be a fragment since fragments never // execute code, so we don't need to check for // this case like we do for a normal resolve. 
- - calculatePackageSpaces(session, - allCandidates.getWrappedHost(host), allCandidates, - resourcePkgMap, new HashMap<Capability, Set<Resource>>(256), - new HashSet<Resource>(64)); -//System.out.println("+++ PACKAGE SPACES START +++"); -//dumpResourcePkgMap(resourcePkgMap); -//System.out.println("+++ PACKAGE SPACES END +++"); - - try - { - checkDynamicPackageSpaceConsistency(session, - allCandidates.getWrappedHost(host), - allCandidates, resourcePkgMap, new HashMap<Resource, Object>(64)); - } - catch (ResolutionException ex) - { - rethrow = ex; - } + rethrow = checkConsistency( + new DumbExecutor(), + session, usesPermutations, importPermutations, allCandidates, + new OpenHashMap<Resource, ResolutionError>(resourcePkgMap.size()), + allCandidates.getHosts(), + true); } while ((rethrow != null) && ((usesPermutations.size() > 0) || (importPermutations.size() > 0))); @@ -567,7 +636,7 @@ public class ResolverImpl implements Resolver } else { - throw rethrow; + throw rethrow.toException(); } } // If there is no exception to rethrow, then this was a clean @@ -583,17 +652,13 @@ public class ResolverImpl implements Resolver allCandidates = session.getMultipleCardCandidates(); } wireMap = populateDynamicWireMap(rc, - host, dynamicReq, resourcePkgMap, wireMap, allCandidates); + host, dynamicReq, wireMap, allCandidates); } } finally { // Always clear the state. - session.getUsesPermutations().clear(); - session.getImportPermutations().clear(); - // TODO these were not cleared out before; but it seems they should be session.setMultipleCardCandidates(null); - session.getPackageSourcesCache().clear(); } } while (retry); @@ -602,41 +667,11 @@ public class ResolverImpl implements Resolver return wireMap; } - private void calculatePackageSpaces( - ResolveSession session, - Resource resource, - Candidates allCandidates, - Map<Resource, Packages> resourcePkgMap, - Map<Capability, Set<Resource>> usesCycleMap, - Set<Resource> cycle) + private static List<WireCandidate> getWireCandidates(ResolveSession session, Candidates allCandidates, Resource resource) { - if (cycle.contains(resource)) - { - return; - } - cycle.add(resource); - - // Make sure package space hasn't already been calculated. - Packages resourcePkgs = resourcePkgMap.get(resource); - if (resourcePkgs != null) - { - if (resourcePkgs.m_isCalculated) - { - return; - } - else - { - resourcePkgs.m_isCalculated = true; - } - } - - // Create parallel lists for requirement and proposed candidate + // Create a list for requirement and proposed candidate // capability or actual capability if resource is resolved or not. - // We use parallel lists so we can calculate the packages spaces for - // resolved and unresolved resources in an identical fashion. - List<Requirement> reqs = new ArrayList<Requirement>(); - List<Capability> caps = new ArrayList<Capability>(); - boolean isDynamicImporting = false; + List<WireCandidate> wireCandidates = new ArrayList<WireCandidate>(256); Wiring wiring = session.getContext().getWirings().get(resource); if (wiring != null) { @@ -650,11 +685,7 @@ public class ResolverImpl implements Resolver // matching dynamic imports. 
Requirement r = wire.getRequirement(); if (!r.getResource().equals(wire.getRequirer()) - || ((r.getDirectives() - .get(PackageNamespace.REQUIREMENT_RESOLUTION_DIRECTIVE) != null) - && r.getDirectives() - .get(PackageNamespace.REQUIREMENT_RESOLUTION_DIRECTIVE) - .equals(PackageNamespace.RESOLUTION_DYNAMIC))) + || Util.isDynamic(r)) { r = new WrappedRequirement(wire.getRequirer(), r); } @@ -665,8 +696,7 @@ public class ResolverImpl implements Resolver { c = new WrappedCapability(wire.getProvider(), c); } - reqs.add(r); - caps.add(c); + wireCandidates.add(new WireCandidate(r, c)); } // Since the resource is resolved, it could be dynamically importing, @@ -674,24 +704,23 @@ public class ResolverImpl implements Resolver // imports. // // NOTE: If the resource is dynamically importing, the fact that - // the dynamic import is added here last to the parallel reqs/caps + // the dynamic import is added here last to the // list is used later when checking to see if the package being // dynamically imported shadows an existing provider. - for (Requirement req - : Util.getDynamicRequirements(wiring.getResourceRequirements(null))) + for (Requirement req : wiring.getResourceRequirements(null)) { - // Get the candidates for the current requirement. - List<Capability> candCaps = allCandidates.getCandidates(req); - // Optional requirements may not have any candidates. - if (candCaps == null) + if (!Util.isDynamic(req)) { continue; } // Grab first (i.e., highest priority) candidate. - Capability cap = candCaps.get(0); - reqs.add(req); - caps.add(cap); - isDynamicImporting = true; + Capability cap = allCandidates.getFirstCandidate(req); + // Optional requirements may not have any candidates. + if (cap == null) + { + continue; + } + wireCandidates.add(new WireCandidate(req, cap)); // Can only dynamically import one at a time, so break // out of the loop after the first. break; @@ -718,41 +747,43 @@ public class ResolverImpl implements Resolver // Use the same requirement, but list each capability separately for (Capability cap : candCaps) { - reqs.add(req); - caps.add(cap); + wireCandidates.add(new WireCandidate(req, cap)); } } // Grab first (i.e., highest priority) candidate else { Capability cap = candCaps.get(0); - reqs.add(req); - caps.add(cap); + wireCandidates.add(new WireCandidate(req, cap)); } } } } + return wireCandidates; + } - // First, add all exported packages to the target resource's package space. - calculateExportedPackages(session.getContext(), resource, allCandidates, resourcePkgMap); - resourcePkgs = resourcePkgMap.get(resource); + private static Packages getPackages( + ResolveSession session, + Candidates allCandidates, + Map<Resource, List<WireCandidate>> allWireCandidates, + Map<Resource, Packages> allPackages, + Resource resource, + Packages resourcePkgs) + { + // First, all all exported packages + // This has been done previously // Second, add all imported packages to the target resource's package space. - for (int i = 0; i < reqs.size(); i++) + for (WireCandidate wire : allWireCandidates.get(resource)) { - Requirement req = reqs.get(i); - Capability cap = caps.get(i); - calculateExportedPackages( - session.getContext(), cap.getResource(), allCandidates, resourcePkgMap); - // If this resource is dynamically importing, then the last requirement // is the dynamic import being resolved, since it is added last to the // parallel lists above. 
For the dynamically imported package, make // sure that the resource doesn't already have a provider for that // package, which would be illegal and shouldn't be allowed. - if (isDynamicImporting && ((i + 1) == reqs.size())) + if (Util.isDynamic(wire.requirement)) { - String pkgName = (String) cap.getAttributes().get(PackageNamespace.PACKAGE_NAMESPACE); + String pkgName = (String) wire.capability.getAttributes().get(PackageNamespace.PACKAGE_NAMESPACE); if (resourcePkgs.m_exportedPkgs.containsKey(pkgName) || resourcePkgs.m_importedPkgs.containsKey(pkgName) || resourcePkgs.m_requiredPkgs.containsKey(pkgName)) @@ -767,18 +798,27 @@ public class ResolverImpl implements Resolver } mergeCandidatePackages( - session.getContext(), resource, req, cap, resourcePkgMap, allCandidates, - new HashMap<Resource, Set<Capability>>(), new HashMap<Resource, Set<Resource>>()); + session, + allPackages, + allCandidates, + resourcePkgs, + wire.requirement, + wire.capability, + new HashSet<Capability>(), + new HashSet<Resource>()); } - // Third, have all candidates to calculate their package spaces. - for (Capability cap : caps) - { - calculatePackageSpaces( - session, cap.getResource(), allCandidates, resourcePkgMap, - usesCycleMap, cycle); - } + return resourcePkgs; + } + private static void computeUses( + ResolveSession session, + Map<Resource, List<WireCandidate>> allWireCandidates, + Map<Resource, Packages> resourcePkgMap, + Resource resource) + { + List<WireCandidate> wireCandidates = allWireCandidates.get(resource); + Packages resourcePkgs = resourcePkgMap.get(resource); // Fourth, if the target resource is unresolved or is dynamically importing, // then add all the uses constraints implied by its imported and required // packages to its package space. @@ -788,20 +828,27 @@ public class ResolverImpl implements Resolver // only exception is if a resolved resource is dynamically importing, then // we need to calculate its uses constraints again to make sure the new // import is consistent with the existing package space. + Wiring wiring = session.getContext().getWirings().get(resource); + Set<Capability> usesCycleMap = new HashSet<Capability>(); + + int size = wireCandidates.size(); + boolean isDynamicImporting = size > 0 + && Util.isDynamic(wireCandidates.get(size - 1).requirement); + if ((wiring == null) || isDynamicImporting) { // Merge uses constraints from required capabilities. - for (int i = 0; i < reqs.size(); i++) + for (WireCandidate w : wireCandidates) { - Requirement req = reqs.get(i); - Capability cap = caps.get(i); + Requirement req = w.requirement; + Capability cap = w.capability; // Ignore bundle/package requirements, since they are // considered below. if (!req.getNamespace().equals(BundleNamespace.BUNDLE_NAMESPACE) && !req.getNamespace().equals(PackageNamespace.PACKAGE_NAMESPACE)) { - List<Requirement> blameReqs = new ArrayList<Requirement>(); - blameReqs.add(req); + List<Requirement> blameReqs = + Collections.singletonList(req); mergeUses( session, @@ -811,41 +858,35 @@ public class ResolverImpl implements Resolver blameReqs, cap, resourcePkgMap, - allCandidates, usesCycleMap); } } // Merge uses constraints from imported packages. - for (Entry<String, List<Blame>> entry : resourcePkgs.m_importedPkgs.entrySet()) + for (List<Blame> blames : resourcePkgs.m_importedPkgs.values()) { - for (Blame blame : entry.getValue()) + for (Blame blame : blames) { - // Ignore resources that import from themselves. 
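[Note: a condensed restatement of the dynamic-import check above. A dynamically imported package is illegal if the resource already sees that package as an export, an import, or via require-bundle; map types are simplified stand-ins for the Packages fields used in this patch:]

    import java.util.Map;

    final class ShadowCheck {
        // true if pkgName already has a provider visible to the resource
        static boolean shadowsExistingProvider(Map<String, ?> exported,
                                               Map<String, ?> imported,
                                               Map<String, ?> required,
                                               String pkgName) {
            return exported.containsKey(pkgName)
                || imported.containsKey(pkgName)
                || required.containsKey(pkgName);
        }
    }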
- if (!blame.m_cap.getResource().equals(resource)) - { - List<Requirement> blameReqs = new ArrayList<Requirement>(); - blameReqs.add(blame.m_reqs.get(0)); + List<Requirement> blameReqs = + Collections.singletonList(blame.m_reqs.get(0)); - mergeUses( - session, - resource, - resourcePkgs, - blame.m_cap, - blameReqs, - null, - resourcePkgMap, - allCandidates, - usesCycleMap); - } + mergeUses( + session, + resource, + resourcePkgs, + blame.m_cap, + blameReqs, + null, + resourcePkgMap, + usesCycleMap); } } // Merge uses constraints from required bundles. - for (Entry<String, List<Blame>> entry : resourcePkgs.m_requiredPkgs.entrySet()) + for (List<Blame> blames : resourcePkgs.m_requiredPkgs.values()) { - for (Blame blame : entry.getValue()) + for (Blame blame : blames) { - List<Requirement> blameReqs = new ArrayList<Requirement>(); - blameReqs.add(blame.m_reqs.get(0)); + List<Requirement> blameReqs = + Collections.singletonList(blame.m_reqs.get(0)); mergeUses( session, @@ -855,26 +896,23 @@ public class ResolverImpl implements Resolver blameReqs, null, resourcePkgMap, - allCandidates, usesCycleMap); } } } } - private void mergeCandidatePackages( - ResolveContext rc, Resource current, Requirement currentReq, - Capability candCap, Map<Resource, Packages> resourcePkgMap, - Candidates allCandidates, Map<Resource, Set<Capability>> cycles, - HashMap<Resource, Set<Resource>> visitedRequiredBundlesMap) + private static void mergeCandidatePackages( + ResolveSession session, + Map<Resource, Packages> resourcePkgMap, + Candidates allCandidates, + Packages packages, + Requirement currentReq, + Capability candCap, + Set<Capability> capabilityCycles, + Set<Resource> visitedRequiredBundles) { - Set<Capability> cycleCaps = cycles.get(current); - if (cycleCaps == null) - { - cycleCaps = new HashSet<Capability>(); - cycles.put(current, cycleCaps); - } - if (!cycleCaps.add(candCap)) + if (!capabilityCycles.add(candCap)) { return; } @@ -882,42 +920,30 @@ public class ResolverImpl implements Resolver if (candCap.getNamespace().equals(PackageNamespace.PACKAGE_NAMESPACE)) { mergeCandidatePackage( - current, false, currentReq, candCap, resourcePkgMap); + packages.m_importedPkgs, + currentReq, candCap); } else if (candCap.getNamespace().equals(BundleNamespace.BUNDLE_NAMESPACE)) { -// TODO: FELIX3 - THIS NEXT LINE IS A HACK. IMPROVE HOW/WHEN WE CALCULATE EXPORTS. - calculateExportedPackages( - rc, candCap.getResource(), allCandidates, resourcePkgMap); // Get the candidate's package space to determine which packages // will be visible to the current resource. - Packages candPkgs = resourcePkgMap.get(candCap.getResource()); - - Set<Resource> visitedRequiredBundles = visitedRequiredBundlesMap.get(current); - if (visitedRequiredBundles == null) - { - visitedRequiredBundles = new HashSet<Resource>(); - visitedRequiredBundlesMap.put(current, visitedRequiredBundles); - } if (visitedRequiredBundles.add(candCap.getResource())) { // We have to merge all exported packages from the candidate, // since the current resource requires it. - for (Entry<String, Blame> entry : candPkgs.m_exportedPkgs.entrySet()) + for (Blame blame : resourcePkgMap.get(candCap.getResource()).m_exportedPkgs.values()) { mergeCandidatePackage( - current, - true, + packages.m_requiredPkgs, currentReq, - entry.getValue().m_cap, - resourcePkgMap); + blame.m_cap); } } // If the candidate requires any other bundles with reexport visibility, // then we also need to merge their packages too. 
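[Note: the inline visibility-directive comparisons in the next hunk are folded into a helper. This is Util.isReexport as added by this same commit (see the Util.java hunk near the bottom of the page):]

    import org.osgi.framework.namespace.BundleNamespace;
    import org.osgi.resource.Requirement;

    final class VisibilityUtil {
        // true when a require-bundle requirement carries visibility:=reexport
        static boolean isReexport(Requirement req) {
            return BundleNamespace.VISIBILITY_REEXPORT.equals(
                req.getDirectives().get(BundleNamespace.REQUIREMENT_VISIBILITY_DIRECTIVE));
        }
    }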
- Wiring candWiring = rc.getWirings().get(candCap.getResource()); + Wiring candWiring = session.getContext().getWirings().get(candCap.getResource()); if (candWiring != null) { for (Wire w : candWiring.getRequiredResourceWires(null)) @@ -925,20 +951,17 @@ public class ResolverImpl implements Resolver if (w.getRequirement().getNamespace() .equals(BundleNamespace.BUNDLE_NAMESPACE)) { - String value = w.getRequirement() - .getDirectives() - .get(BundleNamespace.REQUIREMENT_VISIBILITY_DIRECTIVE); - if ((value != null) - && value.equals(BundleNamespace.VISIBILITY_REEXPORT)) + if (Util.isReexport(w.getRequirement())) { mergeCandidatePackages( - rc, - current, - currentReq, - w.getCapability(), + session, resourcePkgMap, allCandidates, - cycles, visitedRequiredBundlesMap); + packages, + currentReq, + w.getCapability(), + capabilityCycles, + visitedRequiredBundles); } } } @@ -949,37 +972,31 @@ public class ResolverImpl implements Resolver { if (req.getNamespace().equals(BundleNamespace.BUNDLE_NAMESPACE)) { - String value = - req.getDirectives() - .get(BundleNamespace.REQUIREMENT_VISIBILITY_DIRECTIVE); - if ((value != null) - && value.equals(BundleNamespace.VISIBILITY_REEXPORT)) + if (Util.isReexport(req)) { Capability cap = allCandidates.getFirstCandidate(req); - if (cap != null) { + if (cap != null) + { mergeCandidatePackages( - rc, - current, - currentReq, - cap, + session, resourcePkgMap, allCandidates, - cycles, - visitedRequiredBundlesMap); + packages, + currentReq, + cap, + capabilityCycles, + visitedRequiredBundles); } } } } } } - - cycles.remove(current); } - private void mergeCandidatePackage( - Resource current, boolean requires, - Requirement currentReq, Capability candCap, - Map<Resource, Packages> resourcePkgMap) + private static void mergeCandidatePackage( + OpenHashMap<String, List<Blame>> packages, + Requirement currentReq, Capability candCap) { if (candCap.getNamespace().equals(PackageNamespace.PACKAGE_NAMESPACE)) { @@ -988,32 +1005,20 @@ public class ResolverImpl implements Resolver String pkgName = (String) candCap.getAttributes().get(PackageNamespace.PACKAGE_NAMESPACE); - List<Requirement> blameReqs = new ArrayList<Requirement>(); - blameReqs.add(currentReq); - - Packages currentPkgs = resourcePkgMap.get(current); + List<Requirement> blameReqs = Collections.singletonList(currentReq); - Map<String, List<Blame>> packages = (requires) - ? currentPkgs.m_requiredPkgs - : currentPkgs.m_importedPkgs; - List<Blame> blames = packages.get(pkgName); - if (blames == null) - { - blames = new ArrayList<Blame>(); - packages.put(pkgName, blames); - } + List<Blame> blames = packages.getOrCompute(pkgName); blames.add(new Blame(candCap, blameReqs)); //dumpResourcePkgs(current, currentPkgs); } } - private void mergeUses( + private static void mergeUses( ResolveSession session, Resource current, Packages currentPkgs, Capability mergeCap, List<Requirement> blameReqs, Capability matchingCap, Map<Resource, Packages> resourcePkgMap, - Candidates allCandidates, - Map<Capability, Set<Resource>> cycleMap) + Set<Capability> cycleMap) { // If there are no uses, then just return. // If the candidate resource is the same as the current resource, @@ -1025,18 +1030,12 @@ public class ResolverImpl implements Resolver } // Check for cycles. 
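[Note: the cycle check that follows drops the old Map<Capability, Set<Resource>> bookkeeping for a plain Set, relying on Set.add returning false on a revisit. A sketch with a placeholder node type:]

    import java.util.HashSet;
    import java.util.Set;

    final class CycleGuard {
        static void visit(Object node, Set<Object> visited) {
            if (!visited.add(node)) {
                return; // already seen on this traversal, stop the recursion
            }
            // ... recurse into the dependencies of node ...
        }

        public static void main(String[] args) {
            Set<Object> visited = new HashSet<Object>();
            Object n = new Object();
            visit(n, visited);
            visit(n, visited); // returns immediately the second time
        }
    }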
- Set<Resource> set = cycleMap.get(mergeCap); - if (set == null) - { - set = new HashSet<Resource>(); - cycleMap.put(mergeCap, set); - } - if (!set.add(current)) + if (!cycleMap.add(mergeCap)) { return; } - for (Capability candSourceCap : getPackageSources(session, mergeCap, resourcePkgMap)) + for (Capability candSourceCap : getPackageSources(mergeCap, resourcePkgMap)) { List<String> uses; // TODO: RFC-112 - Need impl-specific type @@ -1047,7 +1046,7 @@ public class ResolverImpl implements Resolver // else { String s = candSourceCap.getDirectives().get(Namespace.CAPABILITY_USES_DIRECTIVE); - if (s != null) + if (s != null && s.length() > 0) { // Parse these uses directive. uses = session.getUsesCache().get(s); @@ -1059,19 +1058,18 @@ public class ResolverImpl implements Resolver } else { - uses = Collections.emptyList(); + continue; } } + Packages candSourcePkgs = resourcePkgMap.get(candSourceCap.getResource()); for (String usedPkgName : uses) { - Packages candSourcePkgs = resourcePkgMap.get(candSourceCap.getResource()); List<Blame> candSourceBlames; // Check to see if the used package is exported. Blame candExportedBlame = candSourcePkgs.m_exportedPkgs.get(usedPkgName); if (candExportedBlame != null) { - candSourceBlames = new ArrayList<Blame>(1); - candSourceBlames.add(candExportedBlame); + candSourceBlames = Collections.singletonList(candExportedBlame); } else { @@ -1080,8 +1078,10 @@ public class ResolverImpl implements Resolver candSourceBlames = candSourcePkgs.m_requiredPkgs.get(usedPkgName); // Lastly, if the used package is not required, check to see if it // is imported. - candSourceBlames = (candSourceBlames != null) - ? candSourceBlames : candSourcePkgs.m_importedPkgs.get(usedPkgName); + if (candSourceBlames == null) + { + candSourceBlames = candSourcePkgs.m_importedPkgs.get(usedPkgName); + } } // If the used package cannot be found, then just ignore it @@ -1091,35 +1091,152 @@ public class ResolverImpl implements Resolver continue; } - Map<Capability, UsedBlames> usedPkgBlames = currentPkgs.m_usedPkgs.get(usedPkgName); - if (usedPkgBlames == null) - { - usedPkgBlames = new LinkedHashMap<Capability, UsedBlames>(); - currentPkgs.m_usedPkgs.put(usedPkgName, usedPkgBlames); - } + ArrayMap<Capability, UsedBlames> usedPkgBlames = currentPkgs.m_usedPkgs.getOrCompute(usedPkgName); for (Blame blame : candSourceBlames) { if (blame.m_reqs != null) { - List<Requirement> blameReqs2 = new ArrayList<Requirement>(blameReqs); + List<Requirement> blameReqs2 = new ArrayList<Requirement>(blameReqs.size() + 1); + blameReqs2.addAll(blameReqs); // Only add the last requirement in blame chain because // that is the requirement wired to the blamed capability blameReqs2.add(blame.m_reqs.get(blame.m_reqs.size() - 1)); addUsedBlame(usedPkgBlames, blame.m_cap, blameReqs2, matchingCap); mergeUses(session, current, currentPkgs, blame.m_cap, blameReqs2, matchingCap, - resourcePkgMap, allCandidates, cycleMap); + resourcePkgMap, cycleMap); } else { addUsedBlame(usedPkgBlames, blame.m_cap, blameReqs, matchingCap); mergeUses(session, current, currentPkgs, blame.m_cap, blameReqs, matchingCap, - resourcePkgMap, allCandidates, cycleMap); + resourcePkgMap, cycleMap); } } } } } + private static Map<Resource, Packages> calculatePackageSpaces( + final Executor innerExecutor, + final ResolveSession session, + final Candidates allCandidates, + Collection<Resource> hosts) + { + final EnhancedExecutor executor = new EnhancedExecutor(innerExecutor); + + // Parallel compute wire candidates + final Map<Resource, 
List<WireCandidate>> allWireCandidates = new ConcurrentHashMap<Resource, List<WireCandidate>>(); + { + final ConcurrentMap<Resource, Runnable> tasks = new ConcurrentHashMap<Resource, Runnable>(allCandidates.getNbResources()); + class Computer implements Runnable + { + final Resource resource; + public Computer(Resource resource) + { + this.resource = resource; + } + public void run() + { + List<WireCandidate> wireCandidates = getWireCandidates(session, allCandidates, resource); + allWireCandidates.put(resource, wireCandidates); + for (WireCandidate w : wireCandidates) + { + Resource u = w.capability.getResource(); + if (!tasks.containsKey(u)) + { + Computer c = new Computer(u); + if (tasks.putIfAbsent(u, c) == null) + { + executor.execute(c); + } + } + } + } + } + for (Resource resource : hosts) + { + executor.execute(new Computer(resource)); + } + executor.await(); + } + + // Parallel get all exported packages + final OpenHashMap<Resource, Packages> allPackages = new OpenHashMap<Resource, Packages>(allCandidates.getNbResources()); + for (final Resource resource : allWireCandidates.keySet()) + { + final Packages packages = new Packages(resource); + allPackages.put(resource, packages); + executor.execute(new Runnable() + { + public void run() + { + calculateExportedPackages(session, allCandidates, resource, packages.m_exportedPkgs); + } + }); + } + executor.await(); + + // Parallel compute package lists + for (final Resource resource : allWireCandidates.keySet()) + { + executor.execute(new Runnable() + { + public void run() + { + getPackages(session, allCandidates, allWireCandidates, allPackages, resource, allPackages.get(resource)); + } + }); + } + executor.await(); + + // Compute package sources + // First, sequentially compute packages for resources + // that have required packages, so that all recursive + // calls can be done without threading problems + for (Map.Entry<Resource, Packages> entry : allPackages.fast()) + { + final Resource resource = entry.getKey(); + final Packages packages = entry.getValue(); + if (!packages.m_requiredPkgs.isEmpty()) + { + getPackageSourcesInternal(session, allPackages, resource, packages); + } + } + // Next, for all remaining resources, we can compute them + // in parallel, as they won't refer to other resource packages + for (Map.Entry<Resource, Packages> entry : allPackages.fast()) + { + final Resource resource = entry.getKey(); + final Packages packages = entry.getValue(); + if (packages.m_sources.isEmpty()) + { + executor.execute(new Runnable() + { + public void run() + { + getPackageSourcesInternal(session, allPackages, resource, packages); + } + }); + } + } + executor.await(); + + // Parallel compute uses + for (final Resource resource : allWireCandidates.keySet()) + { + executor.execute(new Runnable() + { + public void run() + { + computeUses(session, allWireCandidates, allPackages, resource); + } + }); + } + executor.await(); + + return allPackages; + } + private static List<String> parseUses(String s) { int nb = 1; int l = s.length(); @@ -1157,71 +1274,57 @@ public class ResolverImpl implements Resolver } private static void addUsedBlame( - Map<Capability, UsedBlames> usedBlames, Capability usedCap, + ArrayMap<Capability, UsedBlames> usedBlames, Capability usedCap, List<Requirement> blameReqs, Capability matchingCap) { // Create a new Blame based off the used capability and the // blame chain requirements. Blame newBlame = new Blame(usedCap, blameReqs); // Find UsedBlame that uses the same capablity as the new blame. 
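[Note: calculatePackageSpaces above fans work out in phases, each submitting tasks and then blocking until the whole batch drains. A minimal sketch of that awaitable wrapper, modeled on the EnhancedExecutor defined at the end of this patch (error propagation omitted here for brevity):]

    import java.util.concurrent.Executor;
    import java.util.concurrent.atomic.AtomicInteger;

    final class AwaitableExecutor {
        private final Executor delegate;
        private final AtomicInteger count = new AtomicInteger();

        AwaitableExecutor(Executor delegate) {
            this.delegate = delegate;
        }

        void execute(final Runnable task) {
            count.incrementAndGet();
            delegate.execute(new Runnable() {
                public void run() {
                    try {
                        task.run();
                    } finally {
                        if (count.decrementAndGet() == 0) {
                            synchronized (count) {
                                count.notifyAll(); // last task wakes await()
                            }
                        }
                    }
                }
            });
        }

        void await() throws InterruptedException {
            synchronized (count) {
                while (count.get() > 0) {
                    count.wait();
                }
            }
        }
    }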
- UsedBlames addToBlame = usedBlames.get(usedCap); - if (addToBlame == null) - { - // If none exist create a new UsedBlame for the capability. - addToBlame = new UsedBlames(usedCap); - usedBlames.put(usedCap, addToBlame); - } + UsedBlames addToBlame = usedBlames.getOrCompute(usedCap); // Add the new Blame and record the matching capability cause // in case the root requirement has multiple cardinality. addToBlame.addBlame(newBlame, matchingCap); } - private void checkPackageSpaceConsistency( + private ResolutionError checkPackageSpaceConsistency( ResolveSession session, + List<Candidates> usesPermutations, + List<Candidates> importPermutations, Resource resource, Candidates allCandidates, + boolean dynamic, Map<Resource, Packages> resourcePkgMap, - Map<Resource, Object> resultCache) throws ResolutionException + Map<Resource, Object> resultCache) { - if (session.getContext().getWirings().containsKey(resource)) + if (!dynamic && session.getContext().getWirings().containsKey(resource)) { - return; + return null; } - checkDynamicPackageSpaceConsistency( - session, resource, allCandidates, resourcePkgMap, resultCache); - } - - private void checkDynamicPackageSpaceConsistency( - ResolveSession session, - Resource resource, - Candidates allCandidates, - Map<Resource, Packages> resourcePkgMap, - Map<Resource, Object> resultCache) throws ResolutionException - { - if (resultCache.containsKey(resource)) + Object cache = resultCache.get(resource); + if (cache != null) { - return; + return cache instanceof ResolutionError ? (ResolutionError) cache : null; } Packages pkgs = resourcePkgMap.get(resource); - ResolutionException rethrow = null; + ResolutionError rethrow = null; Candidates permutation = null; Set<Requirement> mutated = null; - List<Candidates> importPermutations = session.getImportPermutations(); - List<Candidates> usesPermutations = session.getUsesPermutations(); - // Check for conflicting imports from fragments. // TODO: Is this only needed for imports or are generic and bundle requirements also needed? // I think this is only a special case for fragment imports because they can overlap // host imports, which is not allowed in normal metadata. - for (Entry<String, List<Blame>> entry : pkgs.m_importedPkgs.entrySet()) + for (Entry<String, List<Blame>> entry : pkgs.m_importedPkgs.fast()) { - if (entry.getValue().size() > 1) + String pkgName = entry.getKey(); + List<Blame> blames = entry.getValue(); + if (blames.size() > 1) { Blame sourceBlame = null; - for (Blame blame : entry.getValue()) + for (Blame blame : blames) { if (sourceBlame == null) { @@ -1234,47 +1337,36 @@ public class ResolverImpl implements Resolver // Try to permutate the source requirement. allCandidates.permutate(sourceBlame.m_reqs.get(0), importPermutations); // Report conflict. - ResolutionException ex = new ResolutionException( - "Uses constraint violation. 
Unable to resolve resource " - + Util.getSymbolicName(resource) - + " [" + resource - + "] because it is exposed to package '" - + entry.getKey() - + "' from resources " - + Util.getSymbolicName(sourceBlame.m_cap.getResource()) - + " [" + sourceBlame.m_cap.getResource() - + "] and " - + Util.getSymbolicName(blame.m_cap.getResource()) - + " [" + blame.m_cap.getResource() - + "] via two dependency chains.\n\nChain 1:\n" - + toStringBlame(session.getContext(), allCandidates, sourceBlame) - + "\n\nChain 2:\n" - + toStringBlame(session.getContext(), allCandidates, blame), - null, - Collections.singleton(blame.m_reqs.get(0))); - m_logger.log( - Logger.LOG_DEBUG, - "Candidate permutation failed due to a conflict with a " - + "fragment import; will try another if possible.", - ex); - throw ex; + rethrow = new UseConstraintError( + session.getContext(), allCandidates, + resource, pkgName, + sourceBlame, blame); + if (m_logger.isDebugEnabled()) + { + m_logger.debug( + "Candidate permutation failed due to a conflict with a " + + "fragment import; will try another if possible." + + " (" + rethrow.getMessage() + ")"); + } + return rethrow; } } } } // Check if there are any uses conflicts with exported packages. - for (Entry<String, Blame> entry : pkgs.m_exportedPkgs.entrySet()) + for (Entry<String, Blame> entry : pkgs.m_exportedPkgs.fast()) { String pkgName = entry.getKey(); Blame exportBlame = entry.getValue(); - if (!pkgs.m_usedPkgs.containsKey(pkgName)) + ArrayMap<Capability, UsedBlames> pkgBlames = pkgs.m_usedPkgs.get(pkgName); + if (pkgBlames == null) { continue; } - for (UsedBlames usedBlames : pkgs.m_usedPkgs.get(pkgName).values()) + for (UsedBlames usedBlames : pkgBlames.values()) { - if (!isCompatible(session, Collections.singletonList(exportBlame), usedBlames.m_cap, resourcePkgMap)) + if (!isCompatible(exportBlame, usedBlames.m_cap, resourcePkgMap)) { for (Blame usedBlame : usedBlames.m_blames) { @@ -1289,22 +1381,11 @@ public class ResolverImpl implements Resolver permutation = (permutation != null) ? permutation : allCandidates.copy(); - rethrow = (rethrow != null) - ? rethrow - : new ResolutionException( - "Uses constraint violation. Unable to resolve resource " - + Util.getSymbolicName(resource) - + " [" + resource - + "] because it exports package '" - + pkgName - + "' and is also exposed to it from resource " - + Util.getSymbolicName(usedBlame.m_cap.getResource()) - + " [" + usedBlame.m_cap.getResource() - + "] via the following dependency chain:\n\n" - + toStringBlame(session.getContext(), allCandidates, usedBlame), - null, - null); - + if (rethrow == null) + { + rethrow = new UseConstraintError( + session.getContext(), allCandidates, resource, pkgName, usedBlame); + } mutated = (mutated != null) ? mutated : new HashSet<Requirement>(); @@ -1344,12 +1425,13 @@ public class ResolverImpl implements Resolver { usesPermutations.add(permutation); } - m_logger.log( - Logger.LOG_DEBUG, - "Candidate permutation failed due to a conflict between " - + "an export and import; will try another if possible.", - rethrow); - throw rethrow; + if (m_logger.isDebugEnabled()) + { + m_logger.debug("Candidate permutation failed due to a conflict between " + + "an export and import; will try another if possible." + + " (" + rethrow.getMessage() + ")"); + } + return rethrow; } } @@ -1357,26 +1439,35 @@ public class ResolverImpl implements Resolver // We combine the imported and required packages here into one map. 
// Imported packages are added after required packages because they shadow or override // the packages from required bundles. - Map<String, List<Blame>> allImportRequirePkgs = - new LinkedHashMap<String, List<Blame>>(pkgs.m_requiredPkgs.size() + pkgs.m_importedPkgs.size()); - allImportRequirePkgs.putAll(pkgs.m_requiredPkgs); - allImportRequirePkgs.putAll(pkgs.m_importedPkgs); + OpenHashMap<String, List<Blame>> allImportRequirePkgs; + if (pkgs.m_requiredPkgs.isEmpty()) + { + allImportRequirePkgs = pkgs.m_importedPkgs; + } + else + { + allImportRequirePkgs = new OpenHashMap<String, List<Blame>>(pkgs.m_requiredPkgs.size() + pkgs.m_importedPkgs.size()); + allImportRequirePkgs.putAll(pkgs.m_requiredPkgs); + allImportRequirePkgs.putAll(pkgs.m_importedPkgs); + } - for (Entry<String, List<Blame>> requirementBlames : allImportRequirePkgs.entrySet()) + for (Entry<String, List<Blame>> entry : allImportRequirePkgs.fast()) { - String pkgName = requirementBlames.getKey(); - if (!pkgs.m_usedPkgs.containsKey(pkgName)) + String pkgName = entry.getKey(); + ArrayMap<Capability, UsedBlames> pkgBlames = pkgs.m_usedPkgs.get(pkgName); + if (pkgBlames == null) { continue; } + List<Blame> requirementBlames = entry.getValue(); - for (UsedBlames usedBlames : pkgs.m_usedPkgs.get(pkgName).values()) + for (UsedBlames usedBlames : pkgBlames.values()) { - if (!isCompatible(session, requirementBlames.getValue(), usedBlames.m_cap, resourcePkgMap)) + if (!isCompatible(requirementBlames, usedBlames.m_cap, resourcePkgMap)) { // Split packages, need to think how to get a good message for split packages (sigh) // For now we just use the first requirement that brings in the package that conflicts - Blame requirementBlame = requirementBlames.getValue().get(0); + Blame requirementBlame = requirementBlames.get(0); for (Blame usedBlame : usedBlames.m_blames) { if (checkMultiple(session, usedBlames, usedBlame, allCandidates)) @@ -1390,26 +1481,13 @@ public class ResolverImpl implements Resolver permutation = (permutation != null) ? permutation : allCandidates.copy(); - rethrow = (rethrow != null) - ? rethrow - : new ResolutionException( - "Uses constraint violation. Unable to resolve resource " - + Util.getSymbolicName(resource) - + " [" + resource - + "] because it is exposed to package '" - + pkgName - + "' from resources " - + Util.getSymbolicName(requirementBlame.m_cap.getResource()) - + " [" + requirementBlame.m_cap.getResource() - + "] and " - + Util.getSymbolicName(usedBlame.m_cap.getResource()) - + " [" + usedBlame.m_cap.getResource() - + "] via two dependency chains.\n\nChain 1:\n" - + toStringBlame(session.getContext(), allCandidates, requirementBlame) - + "\n\nChain 2:\n" - + toStringBlame(session.getContext(), allCandidates, usedBlame), - null, - null); + if (rethrow == null) + { + rethrow = new UseConstraintError( + session.getContext(), allCandidates, + resource, pkgName, + requirementBlame, usedBlame); + } mutated = (mutated != null) ? mutated @@ -1460,7 +1538,7 @@ public class ResolverImpl implements Resolver // Try to permutate the candidate for the original // import requirement; only permutate it if we haven't // done so already. 
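[Note on the allImportRequirePkgs merge above: the putAll order matters because later puts win, so adding imported packages after required ones lets an Import-Package shadow a same-named package obtained via Require-Bundle. A self-contained demo with string values standing in for blame lists:]

    import java.util.LinkedHashMap;
    import java.util.Map;

    final class ShadowOrderDemo {
        public static void main(String[] args) {
            Map<String, String> required = new LinkedHashMap<String, String>();
            required.put("org.example.api", "from require-bundle");
            Map<String, String> imported = new LinkedHashMap<String, String>();
            imported.put("org.example.api", "from import-package");

            Map<String, String> all = new LinkedHashMap<String, String>();
            all.putAll(required);
            all.putAll(imported); // the import wins
            System.out.println(all.get("org.example.api")); // from import-package
        }
    }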
- for (Blame requirementBlame : requirementBlames.getValue()) + for (Blame requirementBlame : requirementBlames) { Requirement req = requirementBlame.m_reqs.get(0); if (!mutated.contains(req)) @@ -1473,12 +1551,14 @@ public class ResolverImpl implements Resolver } } - m_logger.log( - Logger.LOG_DEBUG, - "Candidate permutation failed due to a conflict between " - + "imports; will try another if possible.", - rethrow); - throw rethrow; + if (m_logger.isDebugEnabled()) + { + m_logger.debug("Candidate permutation failed due to a conflict between " + + "imports; will try another if possible." + + " (" + rethrow.getMessage() + ")" + ); + } + return rethrow; } } } @@ -1497,13 +1577,10 @@ public class ResolverImpl implements Resolver { if (!resource.equals(cap.getResource())) { - try - { - checkPackageSpaceConsistency( - session, cap.getResource(), - allCandidates, resourcePkgMap, resultCache); - } - catch (ResolutionException ex) + rethrow = checkPackageSpaceConsistency( + session, usesPermutations, importPermutations, cap.getResource(), + allCandidates, false, resourcePkgMap, resultCache); + if (rethrow != null) { // If the lower level check didn't create any permutations, // then we should create an import permutation for the @@ -1513,11 +1590,12 @@ public class ResolverImpl implements Resolver { allCandidates.permutate(req, importPermutations); } - throw ex; + return rethrow; } } } } + return null; } private boolean checkMultiple( @@ -1547,25 +1625,17 @@ public class ResolverImpl implements Resolver return (candidates != null) && !candidates.isEmpty(); } - private static void calculateExportedPackages( - ResolveContext rc, - Resource resource, - Candidates allCandidates, - Map<Resource, Packages> resourcePkgMap) + private static OpenHashMap<String, Blame> calculateExportedPackages( + ResolveSession session, + Candidates allCandidates, + Resource resource, + OpenHashMap<String, Blame> exports) { - Packages packages = resourcePkgMap.get(resource); - if (packages != null) - { - return; - } - packages = new Packages(resource); - // Get all exported packages. - Wiring wiring = rc.getWirings().get(resource); + Wiring wiring = session.getContext().getWirings().get(resource); List<Capability> caps = (wiring != null) ? wiring.getResourceCapabilities(null) : resource.getCapabilities(null); - Map<String, Capability> exports = new HashMap<String, Capability>(caps.size()); for (Capability cap : caps) { if (cap.getNamespace().equals(PackageNamespace.PACKAGE_NAMESPACE)) @@ -1576,7 +1646,7 @@ public class ResolverImpl implements Resolver } exports.put( (String) cap.getAttributes().get(PackageNamespace.PACKAGE_NAMESPACE), - cap); + new Blame(cap, null)); } } // Remove substitutable exports that were imported. @@ -1601,147 +1671,142 @@ public class ResolverImpl implements Resolver } } } - - // Add all non-substituted exports to the resources's package space. 
- for (Entry<String, Capability> entry : exports.entrySet()) - { - packages.m_exportedPkgs.put( - entry.getKey(), new Blame(entry.getValue(), null)); - } } + return exports; + } - resourcePkgMap.put(resource, packages); + private static boolean isCompatible( + Blame currentBlame, Capability candCap, + Map<Resource, Packages> resourcePkgMap) + { + if (currentBlame.m_cap.equals(candCap)) + { + return true; + } + Set<Capability> candSources = getPackageSources(candCap, resourcePkgMap); + Set<Capability> currentSources = getPackageSources(currentBlame.m_cap, resourcePkgMap); + return currentSources.containsAll(candSources) + || candSources.containsAll(currentSources); } - private boolean isCompatible( - ResolveSession session, List<Blame> currentBlames, Capability candCap, + private static boolean isCompatible( + List<Blame> currentBlames, Capability candCap, Map<Resource, Packages> resourcePkgMap) { - if ((!currentBlames.isEmpty()) && (candCap != null)) + int size = currentBlames.size(); + switch (size) { - Set<Capability> currentSources; - // quick check for single source package - if (currentBlames.size() == 1) + case 0: + return true; + case 1: + return isCompatible(currentBlames.get(0), candCap, resourcePkgMap); + default: + Set<Capability> currentSources = new HashSet<Capability>(currentBlames.size()); + for (Blame currentBlame : currentBlames) { - Capability currentCap = currentBlames.get(0).m_cap; - if (currentCap.equals(candCap)) - { - return true; - } - currentSources = - getPackageSources( - session, - currentCap, - resourcePkgMap); + Set<Capability> blameSources = getPackageSources(currentBlame.m_cap, resourcePkgMap); + currentSources.addAll(blameSources); } - else - { - currentSources = new HashSet<Capability>(currentBlames.size()); - for (Blame currentBlame : currentBlames) - { - Set<Capability> blameSources = - getPackageSources( - session, - currentBlame.m_cap, - resourcePkgMap); - for (Capability blameSource : blameSources) - { - currentSources.add(blameSource); - } - } - } - - Set<Capability> candSources = - getPackageSources( - session, - candCap, - resourcePkgMap); - + Set<Capability> candSources = getPackageSources(candCap, resourcePkgMap); return currentSources.containsAll(candSources) || candSources.containsAll(currentSources); } - return true; } - private Set<Capability> getPackageSources( - ResolveSession session, Capability cap, Map<Resource, Packages> resourcePkgMap) + private static Set<Capability> getPackageSources( + Capability cap, Map<Resource, Packages> resourcePkgMap) { - Map<Capability, Set<Capability>> packageSourcesCache = session.getPackageSourcesCache(); - // If it is a package, then calculate sources for it. - if (cap.getNamespace().equals(PackageNamespace.PACKAGE_NAMESPACE)) + Resource resource = cap.getResource(); + if(resource == null) { - Set<Capability> sources = packageSourcesCache.get(cap); - if (sources == null) - { - sources = getPackageSourcesInternal( - session.getContext(), cap, resourcePkgMap, - new HashSet<Capability>(64), new HashSet<Capability>(64)); - packageSourcesCache.put(cap, sources); - } - return sources; + return new HashSet<Capability>(); } - // Otherwise, need to return generic capabilies that have - // uses constraints so they are included for consistency - // checking. 
- String uses = cap.getDirectives().get(Namespace.CAPABILITY_USES_DIRECTIVE); - if ((uses != null) && (uses.length() > 0)) + OpenHashMap<Capability, Set<Capability>> sources = resourcePkgMap.get(resource).m_sources; + if(sources == null) { - return Collections.singleton(cap); + return new HashSet<Capability>(); } - return Collections.emptySet(); + Set<Capability> packageSources = sources.get(cap); + if(packageSources == null) + { + return new HashSet<Capability>(); + } + + return packageSources; } - private static Set<Capability> getPackageSourcesInternal( - ResolveContext rc, Capability cap, Map<Resource, Packages> resourcePkgMap, - Set<Capability> sources, Set<Capability> cycleMap) + private static void getPackageSourcesInternal( + ResolveSession session, Map<Resource, Packages> resourcePkgMap, + Resource resource, Packages packages) { - if (cap.getNamespace().equals(PackageNamespace.PACKAGE_NAMESPACE)) + Wiring wiring = session.getContext().getWirings().get(resource); + List<Capability> caps = (wiring != null) + ? wiring.getResourceCapabilities(null) + : resource.getCapabilities(null); + OpenHashMap<String, Set<Capability>> pkgs = new OpenHashMap<String, Set<Capability>>(caps.size()) { + public Set<Capability> compute(String pkgName) { + return new HashSet<Capability>(); + } + }; + Map<Capability, Set<Capability>> sources = packages.m_sources; + for (Capability sourceCap : caps) { - if (!cycleMap.add(cap)) + if (sourceCap.getNamespace().equals(PackageNamespace.PACKAGE_NAMESPACE)) { - return sources; + String pkgName = (String) sourceCap.getAttributes().get(PackageNamespace.PACKAGE_NAMESPACE); + Set<Capability> pkgCaps = pkgs.getOrCompute(pkgName); + // Since capabilities may come from fragments, we need to check + // for that case and wrap them. + if (!resource.equals(sourceCap.getResource())) + { + sourceCap = new WrappedCapability(resource, sourceCap); + } + sources.put(sourceCap, pkgCaps); + pkgCaps.add(sourceCap); } - - // Get the package name associated with the capability. - String pkgName = cap.getAttributes() - .get(PackageNamespace.PACKAGE_NAMESPACE).toString(); - - // Since a resource can export the same package more than once, get - // all package capabilities for the specified package name. - Wiring wiring = rc.getWirings().get(cap.getResource()); - List<Capability> caps = (wiring != null) - ? wiring.getResourceCapabilities(null) - : cap.getResource().getCapabilities(null); - for (Capability sourceCap : caps) + else { - if (sourceCap.getNamespace().equals(PackageNamespace.PACKAGE_NAMESPACE) - && sourceCap.getAttributes().get(PackageNamespace.PACKAGE_NAMESPACE).equals(pkgName)) + // Otherwise, need to return generic capabilities that have + // uses constraints so they are included for consistency + // checking. + String uses = sourceCap.getDirectives().get(Namespace.CAPABILITY_USES_DIRECTIVE); + if ((uses != null) && uses.length() > 0) { - // Since capabilities may come from fragments, we need to check - // for that case and wrap them. - if (!cap.getResource().equals(sourceCap.getResource())) - { - sourceCap = new WrappedCapability(cap.getResource(), sourceCap); - } - sources.add(sourceCap); + sources.put(sourceCap, Collections.singleton(sourceCap)); + } + else + { + sources.put(sourceCap, Collections.<Capability>emptySet()); } } - - // Then get any addition sources for the package from required bundles. 
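[Note: the rewritten getPackageSourcesInternal above replaces the per-call recursion with memoized expansion stored in Packages.m_sources. A simplified model of the idea, with strings standing in for resources and capabilities; this is an illustration of the memoization, not the resolver's actual data flow:]

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    final class SourcesDemo {
        // sources of a node = itself plus, transitively, the sources of
        // everything it requires; memoized so each node is expanded once
        static Set<String> sources(String node,
                                   Map<String, Set<String>> requires,
                                   Map<String, Set<String>> memo) {
            Set<String> result = memo.get(node);
            if (result == null) {
                result = new HashSet<String>();
                memo.put(node, result); // install before recursing to tolerate cycles
                result.add(node);
                Set<String> deps = requires.get(node);
                if (deps != null) {
                    for (String dep : deps) {
                        result.addAll(sources(dep, requires, memo));
                    }
                }
            }
            return result;
        }
    }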
- Packages pkgs = resourcePkgMap.get(cap.getResource()); - List<Blame> required = pkgs.m_requiredPkgs.get(pkgName); + } + for (Map.Entry<String, Set<Capability>> pkg : pkgs.fast()) + { + String pkgName = pkg.getKey(); + List<Blame> required = packages.m_requiredPkgs.get(pkgName); if (required != null) { + Set<Capability> srcs = pkg.getValue(); for (Blame blame : required) { - getPackageSourcesInternal(rc, blame.m_cap, resourcePkgMap, sources, cycleMap); + Capability bcap = blame.m_cap; + if (srcs.add(bcap)) + { + Resource capResource = bcap.getResource(); + Packages capPackages = resourcePkgMap.get(capResource); + Set<Capability> additional = capPackages.m_sources.get(bcap); + if (additional == null) + { + getPackageSourcesInternal(session, resourcePkgMap, capResource, capPackages); + additional = capPackages.m_sources.get(bcap); + } + srcs.addAll(additional); + } } } } - - return sources; } private static Resource getDeclaredResource(Resource resource) @@ -1772,7 +1837,7 @@ public class ResolverImpl implements Resolver } private static Map<Resource, List<Wire>> populateWireMap( - ResolveContext rc, Resource resource, Map<Resource, Packages> resourcePkgMap, + ResolveContext rc, Resource resource, Map<Resource, List<Wire>> wireMap, Candidates allCandidates) { Resource unwrappedResource = getDeclaredResource(resource); @@ -1798,32 +1863,20 @@ public class ResolverImpl implements Resolver if (!cand.getNamespace().startsWith("osgi.wiring.") || !resource.equals(cand.getResource())) { - // If we don't already have wires for the candidate, - // then recursively populate them. - if (!rc.getWirings().containsKey(cand.getResource())) - { - // Need to special case the candidate for identity - // capabilities since it may be from a fragment and - // we don't want to populate wires for the fragment, - // but rather the host to which it is attached. 
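[Note: the populateWireMap hunk that follows replaces the fragment special-casing with a direct provider selection. A condensed restatement; the two helpers are hypothetical stubs for the private unwrapping methods defined elsewhere in this file:]

    import org.osgi.framework.namespace.IdentityNamespace;
    import org.osgi.resource.Capability;
    import org.osgi.resource.Requirement;
    import org.osgi.resource.Resource;

    final class ProviderSelection {
        static Resource provider(Requirement req, Capability cand) {
            if (IdentityNamespace.IDENTITY_NAMESPACE.equals(req.getNamespace())) {
                // identity wires point at the declaring resource, which may be a fragment
                return getDeclaredCapability(cand).getResource();
            }
            // all other wires point at the unwrapped host resource
            return getDeclaredResource(cand.getResource());
        }

        static Capability getDeclaredCapability(Capability c) { return c; } // stub
        static Resource getDeclaredResource(Resource r) { return r; }       // stub
    }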
- Resource targetCand = cand.getResource(); - if (IdentityNamespace.IDENTITY_NAMESPACE.equals(cand.getNamespace()) - && Util.isFragment(targetCand)) - { - targetCand = allCandidates.getFirstCandidate( - targetCand.getRequirements(HostNamespace.HOST_NAMESPACE).get(0)) - .getResource(); - targetCand = allCandidates.getWrappedHost(targetCand); - } - - populateWireMap(rc, targetCand, - resourcePkgMap, wireMap, allCandidates); + // Populate wires for the candidate + populateWireMap(rc, cand.getResource(), + wireMap, allCandidates); + + Resource provider; + if (req.getNamespace().equals(IdentityNamespace.IDENTITY_NAMESPACE)) { + provider = getDeclaredCapability(cand).getResource(); + } else { + provider = getDeclaredResource(cand.getResource()); } - Wire wire = new WireImpl( unwrappedResource, getDeclaredRequirement(req), - getDeclaredResource(cand.getResource()), + provider, getDeclaredCapability(cand)); if (req.getNamespace().equals(PackageNamespace.PACKAGE_NAMESPACE)) { @@ -1942,7 +1995,6 @@ public class ResolverImpl implements Resolver private static Map<Resource, List<Wire>> populateDynamicWireMap( ResolveContext rc, Resource resource, Requirement dynReq, - Map<Resource, Packages> resourcePkgMap, Map<Resource, List<Wire>> wireMap, Candidates allCandidates) { wireMap.put(resource, Collections.<Wire>emptyList()); @@ -1955,7 +2007,7 @@ public class ResolverImpl implements Resolver if (!rc.getWirings().containsKey(dynCand.getResource())) { - populateWireMap(rc, dynCand.getResource(), resourcePkgMap, + populateWireMap(rc, dynCand.getResource(), wireMap, allCandidates); } @@ -2003,163 +2055,60 @@ public class ResolverImpl implements Resolver System.out.println(" " + entry.getKey() + " - " + entry.getValue()); } System.out.println(" USED"); - for (Entry<String, Map<Capability, UsedBlames>> entry : packages.m_usedPkgs.entrySet()) + for (Entry<String, ArrayMap<Capability, UsedBlames>> entry : packages.m_usedPkgs.entrySet()) { System.out.println(" " + entry.getKey() + " - " + entry.getValue().values()); } } - private static String toStringBlame( - ResolveContext rc, Candidates allCandidates, Blame blame) + private static final class WireCandidate { - StringBuilder sb = new StringBuilder(); - if ((blame.m_reqs != null) && !blame.m_reqs.isEmpty()) - { - for (int i = 0; i < blame.m_reqs.size(); i++) - { - Requirement req = blame.m_reqs.get(i); - sb.append(" "); - sb.append(Util.getSymbolicName(req.getResource())); - sb.append(" ["); - sb.append(req.getResource().toString()); - sb.append("]\n"); - if (req.getNamespace().equals(PackageNamespace.PACKAGE_NAMESPACE)) - { - sb.append(" import: "); - } - else - { - sb.append(" require: "); - } - sb.append(req.getDirectives().get(Namespace.REQUIREMENT_FILTER_DIRECTIVE)); - sb.append("\n |"); - if (req.getNamespace().equals(PackageNamespace.PACKAGE_NAMESPACE)) - { - sb.append("\n export: "); - } - else - { - sb.append("\n provide: "); - } - if ((i + 1) < blame.m_reqs.size()) - { - Capability cap = getSatisfyingCapability( - rc, - allCandidates, - blame.m_reqs.get(i)); - if (cap.getNamespace().equals(PackageNamespace.PACKAGE_NAMESPACE)) - { - sb.append(PackageNamespace.PACKAGE_NAMESPACE); - sb.append("="); - sb.append(cap.getAttributes() - .get(PackageNamespace.PACKAGE_NAMESPACE).toString()); - Capability usedCap = - getSatisfyingCapability( - rc, - allCandidates, - blame.m_reqs.get(i + 1)); - sb.append("; uses:="); - sb.append(usedCap.getAttributes() - .get(PackageNamespace.PACKAGE_NAMESPACE)); - } - else - { - sb.append(cap); - } - sb.append("\n"); - } - else - 
{ - Capability export = getSatisfyingCapability( - rc, - allCandidates, - blame.m_reqs.get(i)); - sb.append(export.getNamespace()); - sb.append(": "); - Object namespaceVal = export.getAttributes().get(export.getNamespace()); - if (namespaceVal != null) - { - sb.append(namespaceVal.toString()); - } - else - { - for (Entry<String, Object> attrEntry : export.getAttributes().entrySet()) - { - sb.append(attrEntry.getKey()).append('=') - .append(attrEntry.getValue()).append(';'); - } - } - if (export.getNamespace().equals(PackageNamespace.PACKAGE_NAMESPACE) - && !export.getAttributes().get(PackageNamespace.PACKAGE_NAMESPACE) - .equals(blame.m_cap.getAttributes().get( - PackageNamespace.PACKAGE_NAMESPACE))) - { - sb.append("; uses:="); - sb.append(blame.m_cap.getAttributes().get(PackageNamespace.PACKAGE_NAMESPACE)); - sb.append("\n export: "); - sb.append(PackageNamespace.PACKAGE_NAMESPACE); - sb.append("="); - sb.append(blame.m_cap.getAttributes() - .get(PackageNamespace.PACKAGE_NAMESPACE).toString()); - } - sb.append("\n "); - sb.append(Util.getSymbolicName(blame.m_cap.getResource())); - sb.append(" ["); - sb.append(blame.m_cap.getResource().toString()); - sb.append("]"); - } - } - } - else - { - sb.append(blame.m_cap.getResource().toString()); - } - return sb.toString(); - } + public final Requirement requirement; + public final Capability capability; - private static Capability getSatisfyingCapability( - ResolveContext rc, Candidates allCandidates, Requirement req) - { - // If the requiring revision is not resolved, then check in the - // candidate map for its matching candidate. - Capability cap = allCandidates.getFirstCandidate(req); - // Otherwise, if the requiring revision is resolved then check - // in its wires for the capability satisfying the requirement. - if (cap == null && rc.getWirings().containsKey(req.getResource())) - { - List<Wire> wires = - rc.getWirings().get(req.getResource()).getRequiredResourceWires(null); - req = getDeclaredRequirement(req); - for (Wire w : wires) - { - if (w.getRequirement().equals(req)) - { -// TODO: RESOLVER - This is not 100% correct, since requirements for -// dynamic imports with wildcards will reside on many wires and -// this code only finds the first one, not necessarily the correct -// one. This is only used for the diagnostic message, but it still -// could confuse the user. 
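[Note: the Packages class defined below builds its maps on an overridable compute() hook, the same get-or-create idiom used by addUsedBlame's getOrCompute earlier. Demonstrated here on a plain HashMap; ArrayMap and OpenHashMap implement it directly:]

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    final class GetOrComputeDemo {
        // return the existing value or lazily create one, in a single call site
        static List<String> getOrCompute(Map<String, List<String>> map, String key) {
            List<String> value = map.get(key);
            if (value == null) {
                value = new ArrayList<String>(); // compute(key) in the real classes
                map.put(key, value);
            }
            return value;
        }

        public static void main(String[] args) {
            Map<String, List<String>> blames = new HashMap<String, List<String>>();
            getOrCompute(blames, "org.example.pkg").add("blame-1");
            getOrCompute(blames, "org.example.pkg").add("blame-2");
            System.out.println(blames); // {org.example.pkg=[blame-1, blame-2]}
        }
    }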
- cap = w.getCapability(); - break; - } - } + public WireCandidate(Requirement requirement, Capability capability) + { + this.requirement = requirement; + this.capability = capability; } - - return cap; } - private static class Packages + public static class Packages { - private final Resource m_resource; - public final Map<String, Blame> m_exportedPkgs = new LinkedHashMap<String, Blame>(32); - public final Map<String, List<Blame>> m_importedPkgs = new LinkedHashMap<String, List<Blame>>(32); - public final Map<String, List<Blame>> m_requiredPkgs = new LinkedHashMap<String, List<Blame>>(32); - public final Map<String, Map<Capability, UsedBlames>> m_usedPkgs = new LinkedHashMap<String, Map<Capability, UsedBlames>>(32); - public boolean m_isCalculated = false; + public final OpenHashMap<String, Blame> m_exportedPkgs; + public final OpenHashMap<String, List<Blame>> m_importedPkgs; + public final OpenHashMap<String, List<Blame>> m_requiredPkgs; + public final OpenHashMap<String, ArrayMap<Capability, UsedBlames>> m_usedPkgs; + public final OpenHashMap<Capability, Set<Capability>> m_sources; public Packages(Resource resource) { - m_resource = resource; + int nbCaps = resource.getCapabilities(null).size(); + int nbReqs = resource.getRequirements(null).size(); + + m_exportedPkgs = new OpenHashMap<String, Blame>(nbCaps); + m_importedPkgs = new OpenHashMap<String, List<Blame>>(nbReqs) { + public List<Blame> compute(String s) { + return new ArrayList<Blame>(); + } + }; + m_requiredPkgs = new OpenHashMap<String, List<Blame>>(nbReqs) { + public List<Blame> compute(String s) { + return new ArrayList<Blame>(); + } + }; + m_usedPkgs = new OpenHashMap<String, ArrayMap<Capability, UsedBlames>>(128) { + @Override + protected ArrayMap<Capability, UsedBlames> compute(String s) { + return new ArrayMap<Capability, UsedBlames>() { + @Override + protected UsedBlames compute(Capability key) { + return new UsedBlames(key); + } + }; + } + }; + m_sources = new OpenHashMap<Capability, Set<Capability>>(nbCaps); } } @@ -2267,4 +2216,295 @@ public class ResolverImpl implements Resolver return m_blames.toString(); } } + + private static final class UseConstraintError extends ResolutionError { + + private final ResolveContext m_context; + private final Candidates m_allCandidates; + private final Resource m_resource; + private final String m_pkgName; + private final Blame m_blame1; + private final Blame m_blame2; + + public UseConstraintError(ResolveContext context, Candidates allCandidates, Resource resource, String pkgName, Blame blame) { + this(context, allCandidates, resource, pkgName, blame, null); + } + + public UseConstraintError(ResolveContext context, Candidates allCandidates, Resource resource, String pkgName, Blame blame1, Blame blame2) { + this.m_context = context; + this.m_allCandidates = allCandidates; + this.m_resource = resource; + this.m_pkgName = pkgName; + if (blame1 == null) + { + throw new NullPointerException("First blame cannot be null."); + } + this.m_blame1 = blame1; + this.m_blame2 = blame2; + } + + public String getMessage() { + if (m_blame2 == null) + { + return "Uses constraint violation. Unable to resolve resource " + + Util.getSymbolicName(m_resource) + + " [" + m_resource + + "] because it exports package '" + + m_pkgName + + "' and is also exposed to it from resource " + + Util.getSymbolicName(m_blame1.m_cap.getResource()) + + " [" + m_blame1.m_cap.getResource() + + "] via the following dependency chain:\n\n" + + toStringBlame(m_blame1); + } + else + { + return "Uses constraint violation. 
Unable to resolve resource " + + Util.getSymbolicName(m_resource) + + " [" + m_resource + + "] because it is exposed to package '" + + m_pkgName + + "' from resources " + + Util.getSymbolicName(m_blame1.m_cap.getResource()) + + " [" + m_blame1.m_cap.getResource() + + "] and " + + Util.getSymbolicName(m_blame2.m_cap.getResource()) + + " [" + m_blame2.m_cap.getResource() + + "] via two dependency chains.\n\nChain 1:\n" + + toStringBlame(m_blame1) + + "\n\nChain 2:\n" + + toStringBlame(m_blame2); + } + } + + public Collection<Requirement> getUnresolvedRequirements() { + if (m_blame2 == null) + { + // This is an export conflict so there is only the first blame; + // use its requirement. + return Collections.singleton(m_blame1.m_reqs.get(0)); + } + else + { + return Collections.singleton(m_blame2.m_reqs.get(0)); + } + } + + private String toStringBlame(Blame blame) + { + StringBuilder sb = new StringBuilder(); + if ((blame.m_reqs != null) && !blame.m_reqs.isEmpty()) + { + for (int i = 0; i < blame.m_reqs.size(); i++) + { + Requirement req = blame.m_reqs.get(i); + sb.append(" "); + sb.append(Util.getSymbolicName(req.getResource())); + sb.append(" ["); + sb.append(req.getResource().toString()); + sb.append("]\n"); + if (req.getNamespace().equals(PackageNamespace.PACKAGE_NAMESPACE)) + { + sb.append(" import: "); + } + else + { + sb.append(" require: "); + } + sb.append(req.getDirectives().get(Namespace.REQUIREMENT_FILTER_DIRECTIVE)); + sb.append("\n |"); + if (req.getNamespace().equals(PackageNamespace.PACKAGE_NAMESPACE)) + { + sb.append("\n export: "); + } + else + { + sb.append("\n provide: "); + } + if ((i + 1) < blame.m_reqs.size()) + { + Capability cap = getSatisfyingCapability(blame.m_reqs.get(i)); + if (cap.getNamespace().equals(PackageNamespace.PACKAGE_NAMESPACE)) + { + sb.append(PackageNamespace.PACKAGE_NAMESPACE); + sb.append("="); + sb.append(cap.getAttributes() + .get(PackageNamespace.PACKAGE_NAMESPACE)); + Capability usedCap = + getSatisfyingCapability(blame.m_reqs.get(i + 1)); + sb.append("; uses:="); + sb.append(usedCap.getAttributes() + .get(PackageNamespace.PACKAGE_NAMESPACE)); + } + else + { + sb.append(cap); + } + sb.append("\n"); + } + else + { + Capability export = getSatisfyingCapability(blame.m_reqs.get(i)); + sb.append(export.getNamespace()); + sb.append(": "); + Object namespaceVal = export.getAttributes().get(export.getNamespace()); + if (namespaceVal != null) + { + sb.append(namespaceVal.toString()); + } + else + { + for (Entry<String, Object> attrEntry : export.getAttributes().entrySet()) + { + sb.append(attrEntry.getKey()).append('=') + .append(attrEntry.getValue()).append(';'); + } + } + if (export.getNamespace().equals(PackageNamespace.PACKAGE_NAMESPACE) + && !export.getAttributes().get(PackageNamespace.PACKAGE_NAMESPACE) + .equals(blame.m_cap.getAttributes().get( + PackageNamespace.PACKAGE_NAMESPACE))) + { + sb.append("; uses:="); + sb.append(blame.m_cap.getAttributes().get(PackageNamespace.PACKAGE_NAMESPACE)); + sb.append("\n export: "); + sb.append(PackageNamespace.PACKAGE_NAMESPACE); + sb.append("="); + sb.append(blame.m_cap.getAttributes() + .get(PackageNamespace.PACKAGE_NAMESPACE)); + } + sb.append("\n "); + sb.append(Util.getSymbolicName(blame.m_cap.getResource())); + sb.append(" ["); + sb.append(blame.m_cap.getResource().toString()); + sb.append("]"); + } + } + } + else + { + sb.append(blame.m_cap.getResource().toString()); + } + return sb.toString(); + } + + private Capability getSatisfyingCapability(Requirement req) + { + // If the requiring revision is not 
resolved, then check in the + // candidate map for its matching candidate. + Capability cap = m_allCandidates.getFirstCandidate(req); + // Otherwise, if the requiring revision is resolved then check + // in its wires for the capability satisfying the requirement. + if (cap == null && m_context.getWirings().containsKey(req.getResource())) + { + List<Wire> wires = + m_context.getWirings().get(req.getResource()).getRequiredResourceWires(null); + req = getDeclaredRequirement(req); + for (Wire w : wires) + { + if (w.getRequirement().equals(req)) + { + // TODO: RESOLVER - This is not 100% correct, since requirements for + // dynamic imports with wildcards will reside on many wires and + // this code only finds the first one, not necessarily the correct + // one. This is only used for the diagnostic message, but it still + // could confuse the user. + cap = w.getCapability(); + break; + } + } + } + + return cap; + } + } + + private static class EnhancedExecutor + { + private final Executor executor; + private final AtomicInteger count = new AtomicInteger(); + private Throwable throwable; + + public EnhancedExecutor(Executor executor) + { + this.executor = executor; + } + + public void execute(final Runnable runnable) + { + count.incrementAndGet(); + executor.execute(new Runnable() + { + public void run() + { + try + { + runnable.run(); + } + catch (Throwable t) + { + synchronized (count) + { + if (throwable == null) + { + throwable = t; + } + } + } + finally + { + if (count.decrementAndGet() == 0) + { + synchronized (count) + { + count.notifyAll(); + } + } + } + } + }); + } + + public void await() + { + synchronized (count) + { + if (count.get() > 0) + { + try + { + count.wait(); + } + catch (InterruptedException e) + { + throw new IllegalStateException(e); + } + if (throwable != null) + { + if (throwable instanceof RuntimeException) + { + throw (RuntimeException) throwable; + } + else if (throwable instanceof Error) + { + throw (Error) throwable; + } + else + { + throw new RuntimeException(throwable); + } + } + } + } + } + } + + static class DumbExecutor implements Executor + { + public void execute(Runnable command) + { + command.run(); + } + } + } diff --git a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/SimpleHostedCapability.java b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/SimpleHostedCapability.java index 98e2d4f3d..98e2d4f3d 100644..100755 --- a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/SimpleHostedCapability.java +++ b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/SimpleHostedCapability.java diff --git a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/Util.java b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/Util.java index 84de220db..973cd5b72 100644..100755 --- a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/Util.java +++ b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/Util.java @@ -21,6 +21,7 @@ package org.apache.felix.resolver; import java.util.ArrayList; import java.util.List; import org.osgi.framework.Version; +import org.osgi.framework.namespace.BundleNamespace; import org.osgi.framework.namespace.IdentityNamespace; import org.osgi.framework.namespace.PackageNamespace; import org.osgi.resource.Capability; @@ -90,6 +91,12 @@ public class Util .get(Namespace.REQUIREMENT_RESOLUTION_DIRECTIVE)); } + public static boolean isReexport(Requirement req) + { + return BundleNamespace.VISIBILITY_REEXPORT.equals(req.getDirectives() + 
.get(BundleNamespace.REQUIREMENT_VISIBILITY_DIRECTIVE)); + } + public static List<Requirement> getDynamicRequirements(List<Requirement> reqs) { List<Requirement> result = new ArrayList<Requirement>(); diff --git a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/WireImpl.java b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/WireImpl.java index a79ac7114..a79ac7114 100644..100755 --- a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/WireImpl.java +++ b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/WireImpl.java diff --git a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/WrappedCapability.java b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/WrappedCapability.java index 9bbec36e2..9bbec36e2 100644..100755 --- a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/WrappedCapability.java +++ b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/WrappedCapability.java diff --git a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/WrappedRequirement.java b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/WrappedRequirement.java index a9d66dfea..a9d66dfea 100644..100755 --- a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/WrappedRequirement.java +++ b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/WrappedRequirement.java diff --git a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/WrappedResource.java b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/WrappedResource.java index e37d23458..3f80ae343 100644..100755 --- a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/WrappedResource.java +++ b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/WrappedResource.java @@ -70,13 +70,7 @@ class WrappedResource implements Resource { for (Capability cap : fragment.getCapabilities(namespace)) { - // Filter out identity capabilities, since they - // are not part of the fragment payload. - if (!cap.getNamespace() - .equals(IdentityNamespace.IDENTITY_NAMESPACE)) - { - caps.add(new WrappedCapability(this, cap)); - } + caps.add(new WrappedCapability(this, cap)); } } } diff --git a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/ArrayMap.java b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/ArrayMap.java new file mode 100755 index 000000000..6ea4cf1fd --- /dev/null +++ b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/ArrayMap.java @@ -0,0 +1,184 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
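/*
 * Usage sketch (hypothetical, not part of this patch) for the
 * EnhancedExecutor shown earlier: it counts submitted tasks, records the
 * first Throwable any task threw, and await() blocks until all tasks have
 * finished before rethrowing that failure. This gives the batch resolver
 * join-like, fail-fast semantics on top of any plain Executor (or the
 * DumbExecutor, which just runs tasks inline). EnhancedExecutor is a
 * private class of ResolverImpl, so this snippet assumes access to it.
 */
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class EnhancedExecutorSketch {
    public static void main(String[] args) {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        EnhancedExecutor executor = new EnhancedExecutor(pool);
        for (int i = 0; i < 8; i++) {
            final int batch = i;
            executor.execute(new Runnable() {
                public void run() {
                    System.out.println("resolving permutation batch " + batch);
                }
            });
        }
        executor.await();   // returns only once all 8 tasks have completed
        pool.shutdown();
    }
}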
+ */ +package org.apache.felix.resolver.util; + +import java.util.*; + +@SuppressWarnings("NullableProblems") +public class ArrayMap<K, V> extends AbstractMap<K, V> { + + private Object[] table; + private int size; + protected transient Collection<V> values; + + public ArrayMap() { + this(32); + } + + public ArrayMap(int capacity) { + table = new Object[capacity * 2]; + size = 0; + } + + @Override + @SuppressWarnings("unchecked") + public V get(Object key) { + for (int i = 0, l = size << 1; i < l; i += 2) { + if (key.equals(table[i])) { + return (V) table[i + 1]; + } + } + return null; + } + + @Override + @SuppressWarnings("unchecked") + public V put(K key, V value) { + for (int i = 0, l = size << 1; i < l; i += 2) { + if (key.equals(table[i])) { + V old = (V) table[i + 1]; + table[i + 1] = value; + return old; + } + } + if (size * 2 == table.length) { + Object[] n = new Object[table.length * 2]; + System.arraycopy(table, 0, n, 0, table.length); + table = n; + } + int i = size++ << 1; + table[i++] = key; + table[i] = value; + return null; + } + + @SuppressWarnings("unchecked") + public V getOrCompute(K key) { + for (int i = 0, l = size << 1; i < l; i += 2) { + if (key.equals(table[i])) { + return (V) table[i + 1]; + } + } + V v = compute(key); + if (size << 1 == table.length) { + Object[] n = new Object[table.length << 1]; + System.arraycopy(table, 0, n, 0, table.length); + table = n; + } + int i = size++ << 1; + table[i++] = key; + table[i] = v; + return v; + } + + protected V compute(K key) { + throw new UnsupportedOperationException(); + } + + @Override + public Collection<V> values() { + if (values == null) { + values = new AbstractCollection<V>() { + @Override + public Iterator<V> iterator() { + return new Iterator<V>() { + int index = 0; + + public boolean hasNext() { + return index < size; + } + + @SuppressWarnings("unchecked") + public V next() { + if (index >= size) { + throw new NoSuchElementException(); + } + return (V) table[(index++ << 1) + 1]; + } + + public void remove() { + throw new UnsupportedOperationException(); + } + }; + } + + @Override + public int size() { + return size; + } + }; + } + return values; + } + + @Override + public Set<Entry<K, V>> entrySet() { + return new AbstractSet<Entry<K, V>>() { + @Override + public Iterator<Entry<K, V>> iterator() { + return new Iterator<Entry<K, V>>() { + FastEntry entry = new FastEntry(); + int index = 0; + + public boolean hasNext() { + return index < size; + } + + public FastEntry next() { + if (index >= size) { + throw new NoSuchElementException(); + } + int i = index << 1; + entry.key = table[i]; + entry.value = table[i + 1]; + index++; + return entry; + } + + public void remove() { + throw new UnsupportedOperationException(); + } + }; + } + + @Override + public int size() { + return size; + } + }; + } + + static class FastEntry<K, V> implements Entry<K, V> { + K key; + V value; + + public K getKey() { + return key; + } + + + public V getValue() { + return value; + } + + public V setValue(V value) { + throw new UnsupportedOperationException(); + } + } +} diff --git a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/CopyOnWriteList.java b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/CopyOnWriteList.java index b22ab30e5..b6044341d 100644..100755 --- a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/CopyOnWriteList.java +++ b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/CopyOnWriteList.java @@ -18,11 +18,15 @@ */ package 
org.apache.felix.resolver.util; -import java.util.AbstractList; +import java.lang.reflect.Array; import java.util.Arrays; import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.ListIterator; -public class CopyOnWriteList<T> extends AbstractList<T> { +@SuppressWarnings("NullableProblems") +public class CopyOnWriteList<E> implements List<E>, Cloneable { Object[] data; @@ -30,33 +34,32 @@ public class CopyOnWriteList<T> extends AbstractList<T> { data = new Object[0]; } - public CopyOnWriteList(Collection<T> col) { - if (col instanceof CopyOnWriteList) { - data = ((CopyOnWriteList) col).data; - } else { - data = col.toArray(new Object[col.size()]); - } + public CopyOnWriteList(CopyOnWriteList<? extends E> col) { + data = col.data; + } + + public CopyOnWriteList(Collection<? extends E> col) { + data = col.toArray(new Object[col.size()]); } - @Override public int size() { return data.length; } - public T get(int index) { - return (T) data[index]; + @SuppressWarnings("unchecked") + public E get(int index) { + return (E) data[index]; } - @Override - public T set(int index, T element) { - data = Arrays.copyOf(data, data.length); - T prev = (T) data[index]; + @SuppressWarnings("unchecked") + public E set(int index, E element) { + data = CopyOnWriteSet.copyOf(data, data.length); + E prev = (E) data[index]; data[index] = element; return prev; } - @Override - public void add(int index, T element) { + public void add(int index, E element) { Object[] elements = data; int len = elements.length; Object[] newElements = new Object[len + 1]; @@ -71,10 +74,11 @@ public class CopyOnWriteList<T> extends AbstractList<T> { data = newElements; } - public T remove(int index) { + @SuppressWarnings("unchecked") + public E remove(int index) { Object[] elements = data; int len = elements.length; - T oldValue = (T) elements[index]; + E oldValue = (E) elements[index]; Object[] newElements = new Object[len - 1]; int numMoved = len - index - 1; if (index > 0) { @@ -87,8 +91,183 @@ public class CopyOnWriteList<T> extends AbstractList<T> { return oldValue; } + public boolean isEmpty() { + return size() == 0; + } + + public boolean contains(Object o) { + return indexOf(o) >= 0; + } + + public Iterator<E> iterator() { + return new Iterator<E>() { + int idx = 0; + public boolean hasNext() { + return idx < data.length; + } + @SuppressWarnings("unchecked") + public E next() { + return (E) data[idx++]; + } + public void remove() { + CopyOnWriteList.this.remove(--idx); + } + }; + } + + public Object[] toArray() { + return data.clone(); + } + + @SuppressWarnings("unchecked") + public <T> T[] toArray(T[] a) { + int size = data.length; + if (a.length < size) + // Make a new array of a's runtime type, but my contents: + return (T[]) CopyOnWriteSet.copyOf(data, size, a.getClass()); + System.arraycopy(data, 0, a, 0, size); + if (a.length > size) + a[size] = null; + return a; + } + + public boolean add(E e) { + throw new UnsupportedOperationException(); + } + + public boolean remove(Object o) { + int index; + if ((index = indexOf(o)) >= 0) { + remove(index); + return true; + } + return false; + } + + public boolean containsAll(Collection<?> c) { + Object[] elements = data; + int len = elements.length; + for (Object e : c) { + if (indexOf(e, elements, len) < 0) + return false; + } + return true; + } + + private static int indexOf(Object o, Object[] d, int len) { + if (o == null) { + for (int i = len; i-- > 0;) { + if (d[i] == null) + return i; + } + } else { + for (int i = len; i-- > 0;) { + if 
(o.equals(d[i])) + return i; + } + } + return -1; + } + + public boolean addAll(Collection<? extends E> c) { + throw new UnsupportedOperationException(); + } + + public boolean addAll(int index, Collection<? extends E> c) { + throw new UnsupportedOperationException(); + } + + public boolean removeAll(Collection<?> c) { + boolean modified = false; + Object[] d = data, o = data; + int idx = 0; + for (int i = 0, l = o.length; i < l; i++) { + if (c.contains(o[i])) { + if (!modified) { + d = o.clone(); + idx = i; + modified = true; + } + } else if (modified) { + d[idx++] = o[i]; + } + } + if (modified) { + data = CopyOnWriteSet.copyOf(d, idx); + } + return modified; + } + + public boolean retainAll(Collection<?> c) { + throw new UnsupportedOperationException(); + } + + public void clear() { + data = new Object[0]; + } + + public int indexOf(Object o) { + return indexOf(o, data, data.length); + } + + public int lastIndexOf(Object o) { + throw new UnsupportedOperationException(); + } + + public ListIterator<E> listIterator() { + throw new UnsupportedOperationException(); + } + + public ListIterator<E> listIterator(int index) { + throw new UnsupportedOperationException(); + } + + public List<E> subList(int fromIndex, int toIndex) { + throw new UnsupportedOperationException(); + } + + /** + * Clone this object + * + * @return a cloned object. + */ + @Override + @SuppressWarnings("unchecked") + public CopyOnWriteList<E> clone() { + try { + return (CopyOnWriteList<E>) super.clone(); + } catch (CloneNotSupportedException exc) { + InternalError e = new InternalError(); + e.initCause(exc); + throw e; //should never happen since we are cloneable + } + } + @Override public int hashCode() { return Arrays.hashCode(data); } + + @Override + public boolean equals(Object o) { + if (!(o instanceof CopyOnWriteList)) { + return false; + } + Object[] o1 = data; + Object[] o2 = ((CopyOnWriteList) o).data; + if (o1 == o2) { + return true; + } + int i; + if ((i = o1.length) != o2.length) { + return false; + } + while (i-- > 0) { + Object v1 = o1[i]; + Object v2 = o2[i]; + if (!(v1 == null ? v2 == null : v1.equals(v2))) + return false; + } + return true; + } } diff --git a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/CopyOnWriteSet.java b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/CopyOnWriteSet.java index 239233bf7..92419f3fc 100644..100755 --- a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/CopyOnWriteSet.java +++ b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/CopyOnWriteSet.java @@ -18,12 +18,15 @@ */ package org.apache.felix.resolver.util; +import java.lang.reflect.Array; import java.util.AbstractSet; import java.util.Arrays; import java.util.Collection; import java.util.Iterator; +import java.util.Set; -public class CopyOnWriteSet<E> extends AbstractSet<E> { +@SuppressWarnings("NullableProblems") +public class CopyOnWriteSet<E> implements Set<E>, Cloneable { Object[] data; @@ -31,15 +34,14 @@ public class CopyOnWriteSet<E> extends AbstractSet<E> { data = new Object[0]; } - public CopyOnWriteSet(Collection<E> col) { - if (col instanceof CopyOnWriteSet) { - data = ((CopyOnWriteSet) col).data; - } else { - data = col.toArray(new Object[col.size()]); - } + public CopyOnWriteSet(CopyOnWriteSet<? extends E> col) { + data = col.data; + } + + public CopyOnWriteSet(Collection<? 
extends E> col) { + data = col.toArray(new Object[col.size()]); } - @Override public Iterator<E> iterator() { return new Iterator<E>() { int idx = 0; @@ -56,12 +58,10 @@ public class CopyOnWriteSet<E> extends AbstractSet<E> { }; } - @Override public int size() { return data.length; } - @Override public boolean add(E e) { Object[] d = data; if (d.length == 0) { @@ -94,12 +94,49 @@ public class CopyOnWriteSet<E> extends AbstractSet<E> { data = a; } + public Object[] toArray() { + return data.clone(); + } + + @SuppressWarnings("unchecked") + public <T> T[] toArray(T[] a) { + int size = data.length; + if (a.length < size) + // Make a new array of a's runtime type, but my contents: + return (T[]) copyOf(data, size, a.getClass()); + System.arraycopy(data, 0, a, 0, size); + if (a.length > size) + a[size] = null; + return a; + } + @Override public boolean equals(Object o) { - if (o instanceof CopyOnWriteSet && ((CopyOnWriteSet) o).data == data) { + if (!(o instanceof CopyOnWriteSet)) { + return false; + } + Object[] o1 = data; + Object[] o2 = ((CopyOnWriteSet) o).data; + if (o1 == o2) { return true; } - return super.equals(o); + int l = o1.length; + if (l != o2.length) { + return false; + } + loop: + for (int i = l; i-- > 0;) { + Object v1 = o1[i]; + for (int j = l; j-- > 0;) { + Object v2 = o2[j]; + if (v1 == v2) + continue loop; + if (v1 != null && v1.equals(v2)) + continue loop; + } + return false; + } + return true; } @Override @@ -107,4 +144,108 @@ public class CopyOnWriteSet<E> extends AbstractSet<E> { return Arrays.hashCode(data); } + /** + * Clone this object + * + * @return a cloned object. + */ + @Override + @SuppressWarnings("unchecked") + public CopyOnWriteSet<E> clone() { + try { + return (CopyOnWriteSet<E>) super.clone(); + } catch (CloneNotSupportedException exc) { + InternalError e = new InternalError(); + e.initCause(exc); + throw e; //should never happen since we are cloneable + } + } + + public boolean isEmpty() { + return size() == 0; + } + + public boolean contains(Object o) { + throw new UnsupportedOperationException(); + } + + public boolean remove(Object o) { + int index; + if ((index = indexOf(o, data, data.length)) >= 0) { + remove(index); + return true; + } + return false; + } + + private static int indexOf(Object o, Object[] d, int len) { + if (o == null) { + for (int i = len; i-- > 0;) { + if (d[i] == null) + return i; + } + } else { + for (int i = len; i-- > 0;) { + if (o.equals(d[i])) + return i; + } + } + return -1; + } + + public boolean containsAll(Collection<?> c) { + throw new UnsupportedOperationException(); + } + + public boolean addAll(Collection<? 
extends E> c) { + Object[] cs = c.toArray(); + if (cs.length == 0) + return false; + Object[] elements = data; + int len = elements.length; + int added = 0; + // uniquify and compact elements in cs + for (int i = 0; i < cs.length; ++i) { + Object e = cs[i]; + if (indexOf(e, elements, len) < 0 && + indexOf(e, cs, added) < 0) + cs[added++] = e; + } + if (added > 0) { + Object[] newElements = copyOf(elements, len + added); + System.arraycopy(cs, 0, newElements, len, added); + data = newElements; + return true; + } + return false; + } + + public boolean retainAll(Collection<?> c) { + throw new UnsupportedOperationException(); + } + + public boolean removeAll(Collection<?> c) { + throw new UnsupportedOperationException(); + } + + public void clear() { + throw new UnsupportedOperationException(); + } + + @SuppressWarnings("unchecked") + public static <T> T[] copyOf(T[] original, int newLength) { + return (T[]) copyOf(original, newLength, original.getClass()); + } + + @SuppressWarnings("unchecked") + public static <T,U> T[] copyOf(U[] original, int newLength, Class<? extends T[]> newType) { + T[] copy; + if ((Object) newType == Object[].class) { + copy = (T[]) new Object[newLength]; + } else { + copy = (T[]) Array.newInstance(newType.getComponentType(), newLength); + } + System.arraycopy(original, 0, copy, 0, Math.min(original.length, newLength)); + return copy; + } } diff --git a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/OpenHashMap.java b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/OpenHashMap.java index 8920a6529..edd6823d4 100644..100755 --- a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/OpenHashMap.java +++ b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/OpenHashMap.java @@ -18,604 +18,1887 @@ */ package org.apache.felix.resolver.util; -import java.util.AbstractMap; -import java.util.AbstractSet; +import java.io.IOException; +import java.io.ObjectInputStream; +import java.io.ObjectOutputStream; +import java.io.Serializable; +import java.lang.reflect.Array; +import java.util.AbstractCollection; import java.util.Arrays; +import java.util.Collection; +import java.util.Comparator; import java.util.Iterator; +import java.util.Map; import java.util.NoSuchElementException; import java.util.Set; +import java.util.SortedMap; +import java.util.SortedSet; /** - * An open addressing map. - * Low memory consumption compared to a HashMap. - * - * Based on the mahout collection classes. 
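/*
 * Aside, not part of this patch: the util collections shown above trade
 * generality for speed. ArrayMap keeps key/value pairs in one flat array
 * and scans linearly, which is cheap for the handful of entries the
 * resolver stores per requirement. CopyOnWriteList and CopyOnWriteSet copy
 * their backing array on each mutation, so clone() is O(1) and a clone is
 * isolated from later changes; operations the resolver never calls simply
 * throw (note that CopyOnWriteList.add(E) throws while add(int, E) works).
 * A minimal sketch of the clone isolation:
 */
import org.apache.felix.resolver.util.CopyOnWriteList;
import java.util.Arrays;

public class CopyOnWriteSketch {
    public static void main(String[] args) {
        CopyOnWriteList<String> original =
                new CopyOnWriteList<String>(Arrays.asList("a", "b", "c"));
        CopyOnWriteList<String> snapshot = original.clone(); // shares data[]
        original.set(1, "B");           // first mutation copies data[] first
        System.out.println(snapshot.get(1)); // prints "b": snapshot unaffected
        System.out.println(original.get(1)); // prints "B"
    }
}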
+ * Based on fastutil Object2ObjectLinkedOpenHashMap */ -public class OpenHashMap<K, V> extends AbstractMap<K, V> implements Cloneable { - - static final int[] primeCapacities = { - 3, 5, 7, 11, 17, 23, 31, 37, 43, 47, 67, 79, 89, 97, 137, 163, 179, 197, 277, 311, - 331, 359, 379, 397, 433, 557, 599, 631, 673, 719, 761, 797, 877, 953, 1039, 1117, - 1201, 1277, 1361, 1439, 1523, 1597, 1759, 1907, 2081, 2237, 2411, 2557, 2729, 2879, - 3049, 3203, 3527, 3821, 4177, 4481, 4831, 5119, 5471, 5779, 6101, 6421, 7057, 7643, - 8363, 8963, 9677, 10243, 10949, 11579, 12203, 12853, 14143, 15287, 16729, 17929, - 19373, 20507, 21911, 23159, 24407, 25717, 28289, 30577, 33461, 35863, 38747, 41017, - 43853, 46327, 48817, 51437, 56591, 61169, 66923, 71741, 77509, 82037, 87719, 92657, - 97649, 102877, 113189, 122347, 133853, 143483, 155027, 164089, 175447, 185323, - 195311, 205759, 226379, 244703, 267713, 286973, 310081, 328213, 350899, 370661, - 390647, 411527, 452759, 489407, 535481, 573953, 620171, 656429, 701819, 741337, - 781301, 823117, 905551, 978821, 1070981, 1147921, 1240361, 1312867, 1403641, 1482707, - 1562611, 1646237, 1811107, 1957651, 2141977, 2295859, 2480729, 2625761, 2807303, - 2965421, 3125257, 3292489, 3622219, 3915341, 4283963, 4591721, 4961459, 5251529, - 5614657, 5930887, 6250537, 6584983, 7244441, 7830701, 8567929, 9183457, 9922933, - 10503061, 11229331, 11861791, 12501169, 13169977, 14488931, 15661423, 17135863, - 18366923, 19845871, 21006137, 22458671, 23723597, 25002389, 26339969, 28977863, - 31322867, 34271747, 36733847, 39691759, 42012281, 44917381, 47447201, 50004791, - 52679969, 57955739, 62645741, 68543509, 73467739, 79383533, 84024581, 89834777, - 94894427, 100009607, 105359939, 115911563, 125291483, 137087021, 146935499, - 158767069, 168049163, 179669557, 189788857, 200019221, 210719881, 231823147, - 250582987, 274174111, 293871013, 317534141, 336098327, 359339171, 379577741, - 400038451, 421439783, 463646329, 501165979, 548348231, 587742049, 635068283, - 672196673, 718678369, 759155483, 800076929, 842879579, 927292699, 1002331963, - 1096696463, 1175484103, 1270136683, 1344393353, 1437356741, 1518310967, - 1600153859, 1685759167, 1854585413, 2004663929, 2147483647 - }; - static final int largestPrime = primeCapacities[primeCapacities.length - 1]; - - protected static final int defaultCapacity = 277; - protected static final double defaultMinLoadFactor = 0.2; - protected static final double defaultMaxLoadFactor = 0.5; - - protected static final Object FREE = null; - protected static final Object REMOVED = new Object(); - - /** The number of distinct associations in the map; its "size()". */ - protected int distinct; - - /** - * The table capacity c=table.length always satisfies the invariant <tt>c * minLoadFactor <= s <= c * - * maxLoadFactor</tt>, where s=size() is the number of associations currently contained. The term "c * minLoadFactor" - * is called the "lowWaterMark", "c * maxLoadFactor" is called the "highWaterMark". In other words, the table capacity - * (and proportionally the memory used by this class) oscillates within these constraints. The terms are precomputed - * and cached to avoid recalculating them each time put(..) or removeKey(...) is called. 
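/*
 * The new implementation calls mix(...), arraySize(...), maxFill(...) and
 * nextPowerOfTwo(...), which live elsewhere in this file and are not shown
 * in these hunks. A sketch of what they conventionally look like, modeled
 * on fastutil's HashCommon (the library this map is derived from); the
 * exact constants below are assumptions:
 */
final class HashHelpersSketch {
    /** 2^32 times the golden ratio; scrambles hash codes before masking. */
    private static final int INT_PHI = 0x9E3779B9;

    static int mix(int x) {
        int h = x * INT_PHI;
        return h ^ (h >>> 16);
    }

    /** Smallest power of two greater than or equal to x. */
    static long nextPowerOfTwo(long x) {
        if (x == 0) return 1;
        x--;
        x |= x >> 1;  x |= x >> 2;  x |= x >> 4;
        x |= x >> 8;  x |= x >> 16; x |= x >> 32;
        return x + 1;
    }

    /** Table size needed to hold 'expected' entries at load factor f. */
    static int arraySize(int expected, float f) {
        long s = Math.max(2L, nextPowerOfTwo((long) Math.ceil(expected / f)));
        if (s > (1 << 30)) {
            throw new IllegalArgumentException("Too many elements: " + expected);
        }
        return (int) s;
    }

    /** Entries allowed before a rehash; always leaves one slot free. */
    static int maxFill(int n, float f) {
        return Math.min((int) Math.ceil(n * f), n - 1);
    }
}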
- */ - protected int lowWaterMark; - protected int highWaterMark; +@SuppressWarnings("NullableProblems") +public class OpenHashMap<K, V> implements Serializable, Cloneable, SortedMap<K, V> { + + private static final long serialVersionUID = 0L; + protected transient Object[] key; + protected transient Object[] value; + protected transient int mask; + protected transient boolean containsNullKey; + protected transient int first; + protected transient int last; + protected transient long[] link; + protected transient int n; + protected transient int maxFill; + protected int size; + protected final float f; + protected V defRetValue; + + protected transient Iterable<Map.Entry<K, V>> fast; + protected transient SortedSet<Map.Entry<K, V>> entries; + protected transient SortedSet<K> keys; + protected transient Collection<V> values; + + public OpenHashMap(int expected, float f) { + this.first = -1; + this.last = -1; + if (f > 0.0F && f <= 1.0F) { + if (expected < 0) { + throw new IllegalArgumentException("The expected number of elements must be nonnegative"); + } else { + this.f = f; + this.n = arraySize(expected, f); + this.mask = this.n - 1; + this.maxFill = maxFill(this.n, f); + this.key = new Object[this.n + 1]; + this.value = new Object[this.n + 1]; + this.link = new long[this.n + 1]; + } + } else { + throw new IllegalArgumentException("Load factor must be greater than 0 and smaller than or equal to 1"); + } + } - /** The minimum load factor for the hashtable. */ - protected double minLoadFactor; + public OpenHashMap(int expected) { + this(expected, 0.75F); + } - /** The maximum load factor for the hashtable. */ - protected double maxLoadFactor; + public OpenHashMap() { + this(16, 0.75F); + } - /** The hash table keys. */ - protected Object[] table; + public OpenHashMap(Map<? extends K, ? extends V> m, float f) { + this(m.size(), f); + this.putAll(m); + } - /** The hash table values. */ - protected Object[] values; + public OpenHashMap(Map<? extends K, ? extends V> m) { + this(m, 0.75F); + } - /** The number of table entries in state==FREE. */ - protected int freeEntries; + public OpenHashMap(K[] k, V[] v, float f) { + this(k.length, f); + if (k.length != v.length) { + throw new IllegalArgumentException("The key array and the value array have different lengths (" + k.length + " and " + v.length + ")"); + } else { + for (int i = 0; i < k.length; ++i) { + this.put(k[i], v[i]); + } + } + } - /** Constructs an empty map with default capacity and default load factors. */ - public OpenHashMap() { - this(defaultCapacity); + public OpenHashMap(K[] k, V[] v) { + this(k, v, 0.75F); } - /** - * Constructs an empty map with the specified initial capacity and default load factors. - * - * @param initialCapacity the initial capacity of the map. - * @throws IllegalArgumentException if the initial capacity is less than zero. - */ - public OpenHashMap(int initialCapacity) { - this(initialCapacity, defaultMinLoadFactor, defaultMaxLoadFactor); + public void defaultReturnValue(V rv) { + this.defRetValue = rv; } - /** - * Constructs an empty map with the specified initial capacity and the specified minimum and maximum load factor. - * - * @param initialCapacity the initial capacity. - * @param minLoadFactor the minimum load factor. - * @param maxLoadFactor the maximum load factor. - * @throws IllegalArgumentException if <tt>initialCapacity < 0 || (minLoadFactor < 0.0 || minLoadFactor >= 1.0) || - * (maxLoadFactor <= 0.0 || maxLoadFactor >= 1.0) || (minLoadFactor >= - * maxLoadFactor)</tt>. 
- */ - public OpenHashMap(int initialCapacity, double minLoadFactor, double maxLoadFactor) { - setUp(initialCapacity, minLoadFactor, maxLoadFactor); + public V defaultReturnValue() { + return this.defRetValue; } - /** Removes all (key,value) associations from the receiver. Implicitly calls <tt>trimToSize()</tt>. */ - @Override - public void clear() { - Arrays.fill(this.table, FREE); - Arrays.fill(this.values, null); - distinct = 0; - freeEntries = table.length; // delta - trimToSize(); + public boolean equals(Object o) { + if (o == this) { + return true; + } else if (!(o instanceof Map)) { + return false; + } else { + Map m = (Map) o; + int n = m.size(); + if (this.size() != n) { + return false; + } + Iterator<? extends Entry<?, ?>> i = this.fast().iterator(); + while (n-- > 0) { + Entry e = i.next(); + Object k = e.getKey(); + Object v = e.getValue(); + Object v2 = m.get(k); + if (v == null) { + if (v2 != null) { + return false; + } + } else if (!v.equals(v2)) { + return false; + } + } + return true; + } } - /** - * Returns a deep copy of the receiver. - * - * @return a deep copy of the receiver. - */ - @Override - @SuppressWarnings("unchecked") - public OpenHashMap<K, V> clone() { - try { - OpenHashMap<K,V> copy = (OpenHashMap<K,V>) super.clone(); - copy.table = copy.table.clone(); - copy.values = copy.values.clone(); - return copy; - } catch (CloneNotSupportedException exc) { - InternalError e = new InternalError(); - e.initCause(exc); - throw e; //should never happen since we are cloneable + public String toString() { + StringBuilder s = new StringBuilder(); + Iterator<Map.Entry<K, V>> i = this.fast().iterator(); + int n = this.size(); + boolean first = true; + s.append("{"); + + while (n-- != 0) { + if (first) { + first = false; + } else { + s.append(", "); + } + + Map.Entry<K, V> e = i.next(); + if (this == e.getKey()) { + s.append("(this map)"); + } else { + s.append(String.valueOf(e.getKey())); + } + + s.append("=>"); + if (this == e.getValue()) { + s.append("(this map)"); + } else { + s.append(String.valueOf(e.getValue())); + } } + + s.append("}"); + return s.toString(); } - /** - * Returns <tt>true</tt> if the receiver contains the specified key. - * - * @return <tt>true</tt> if the receiver contains the specified key. - */ - @SuppressWarnings("unchecked") - @Override - public boolean containsKey(Object key) { - return indexOfKey((K) key) >= 0; + private int realSize() { + return this.containsNullKey ? this.size - 1 : this.size; } - /** - * Returns <tt>true</tt> if the receiver contains the specified value. - * - * @return <tt>true</tt> if the receiver contains the specified value. - */ - @SuppressWarnings("unchecked") - @Override - public boolean containsValue(Object value) { - return indexOfValue((V)value) >= 0; + private void ensureCapacity(int capacity) { + int needed = arraySize(capacity, this.f); + if (needed > this.n) { + this.rehash(needed); + } + } - /** - * Ensures that the receiver can hold at least the specified number of associations without needing to allocate new - * internal memory. If necessary, allocates new internal memory and increases the capacity of the receiver. <p> This - * method never need be called; it is for performance tuning only. Calling this method before <tt>put()</tt>ing a - * large number of associations boosts performance, because the receiver will grow only once instead of potentially - * many times and hash collisions get less probable. - * - * @param minCapacity the desired minimum capacity. 
- */ - public void ensureCapacity(int minCapacity) { - if (table.length < minCapacity) { - int newCapacity = nextPrime(minCapacity); - rehash(newCapacity); + private void tryCapacity(long capacity) { + int needed = (int) Math.min(1073741824L, Math.max(2L, nextPowerOfTwo((long) Math.ceil((double) ((float) capacity / this.f))))); + if (needed > this.n) { + this.rehash(needed); } + } - /** - * Returns the value associated with the specified key. It is often a good idea to first check with {@link - * #containsKey(Object)} whether the given key has a value associated or not, i.e. whether there exists an association - * for the given key or not. - * - * @param key the key to be searched for. - * @return the value associated with the specified key; <tt>0</tt> if no such key is present. - */ @SuppressWarnings("unchecked") - @Override - public V get(Object key) { - int i = indexOfKey((K)key); - if (i < 0) { - return null; - } //not contained - return (V)values[i]; + private V removeEntry(int pos) { + Object oldValue = this.value[pos]; + this.value[pos] = null; + --this.size; + this.fixPointers(pos); + this.shiftKeys(pos); + if (this.size < this.maxFill / 4 && this.n > 16) { + this.rehash(this.n / 2); + } + + return (V) oldValue; } - /** - * @param key the key to be added to the receiver. - * @return the index where the key would need to be inserted, if it is not already contained. Returns -index-1 if the - * key is already contained at slot index. Therefore, if the returned index < 0, then it is already contained - * at slot -index-1. If the returned index >= 0, then it is NOT already contained and should be inserted at - * slot index. - */ - protected int indexOfInsertion(K key) { - Object[] tab = table; - int length = tab.length; + @SuppressWarnings("unchecked") + private V removeNullEntry() { + this.containsNullKey = false; + Object oldValue = this.value[this.n]; + this.value[this.n] = null; + --this.size; + this.fixPointers(this.n); + if (this.size < this.maxFill / 4 && this.n > 16) { + this.rehash(this.n / 2); + } - int hash = key.hashCode() & 0x7FFFFFFF; - int i = hash % length; - int decrement = hash % (length - 2); // double hashing, see http://www.eece.unm.edu/faculty/heileman/hash/node4.html - //int decrement = (hash / length) % length; - if (decrement == 0) { - decrement = 1; + return (V) oldValue; + } + + public void putAll(Map<? extends K, ? extends V> m) { + if ((double) this.f <= 0.5D) { + this.ensureCapacity(m.size()); + } else { + this.tryCapacity((long) (this.size() + m.size())); } - // stop if we find a removed or free slot, or if we find the key itself - // do NOT skip over removed slots (yes, open addressing is like that...) - while (table[i] != FREE && table[i] != REMOVED && !equalsMindTheNull(key, tab[i])) { - i -= decrement; - //hashCollisions++; - if (i < 0) { - i += length; + int n = m.size(); + if (m instanceof OpenHashMap) { + Iterator<? extends Map.Entry<? extends K, ? extends V>> i = ((OpenHashMap) m).fast().iterator(); + while (n-- != 0) { + Map.Entry<? extends K, ? extends V> e = i.next(); + this.put(e.getKey(), e.getValue()); + } + } else { + Iterator<? extends Map.Entry<? extends K, ? extends V>> i = m.entrySet().iterator(); + while (n-- != 0) { + Map.Entry<? extends K, ? extends V> e = i.next(); + this.put(e.getKey(), e.getValue()); } } + } - if (table[i] == REMOVED) { - // stop if we find a free slot, or if we find the key itself. - // do skip over removed slots (yes, open addressing is like that...) - // assertion: there is at least one FREE slot. 
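/*
 * For contrast, a standalone sketch of the probe sequence used by the
 * double-hashing code being removed here (the numbers are illustrative).
 * Because the table length is prime, any decrement in [1, length-1] is
 * co-prime to it, so a colliding key eventually visits every slot:
 */
public class DoubleHashingSketch {
    public static void main(String[] args) {
        int length = 11;                          // a prime capacity
        int hash = "com.example.key".hashCode() & 0x7FFFFFFF;
        int i = hash % length;                    // first slot tried
        int decrement = hash % (length - 2);      // per-key probe step
        if (decrement == 0) {
            decrement = 1;                        // never step by zero
        }
        for (int probe = 0; probe < 5; probe++) {
            System.out.println("probe " + probe + " -> slot " + i);
            i -= decrement;                       // walk backwards...
            if (i < 0) {
                i += length;                      // ...wrapping around
            }
        }
    }
}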
- int j = i; - while (table[i] != FREE && (table[i] == REMOVED || tab[i] != key)) { - i -= decrement; - //hashCollisions++; - if (i < 0) { - i += length; - } + private int insert(K k, V v) { + int pos; + if (k == null) { + if (this.containsNullKey) { + return this.n; } - if (table[i] == FREE) { - i = j; + + this.containsNullKey = true; + pos = this.n; + } else { + Object[] key = this.key; + Object curr; + if ((curr = key[pos = mix(k.hashCode()) & this.mask]) != null) { + if (curr.equals(k)) { + return pos; + } + + while ((curr = key[pos = pos + 1 & this.mask]) != null) { + if (curr.equals(k)) { + return pos; + } + } } + + key[pos] = k; } + this.value[pos] = v; + if (this.size == 0) { + this.first = this.last = pos; + this.link[pos] = -1L; + } else { + this.link[this.last] ^= (this.link[this.last] ^ (long) pos & 0xFFFFFFFFL) & 0xFFFFFFFFL; + this.link[pos] = ((long) this.last & 0xFFFFFFFFL) << 32 | 0xFFFFFFFFL; + this.last = pos; + } - if (table[i] != FREE && table[i] != REMOVED) { - // key already contained at slot i. - // return a negative number identifying the slot. - return -i - 1; + if (this.size++ >= this.maxFill) { + this.rehash(arraySize(this.size + 1, this.f)); } - // not already contained, should be inserted at slot i. - // return a number >= 0 identifying the slot. - return i; - } - /** - * @param key the key to be searched in the receiver. - * @return the index where the key is contained in the receiver, returns -1 if the key was not found. - */ - protected int indexOfKey(K key) { - Object[] tab = table; - int length = tab.length; + return -1; + } - int hash = key.hashCode() & 0x7FFFFFFF; - int i = hash % length; - int decrement = hash % (length - 2); // double hashing, see http://www.eece.unm.edu/faculty/heileman/hash/node4.html - //int decrement = (hash / length) % length; - if (decrement == 0) { - decrement = 1; + @SuppressWarnings("unchecked") + public V put(K k, V v) { + int pos = this.insert(k, v); + if (pos < 0) { + return this.defRetValue; + } else { + Object oldValue = this.value[pos]; + this.value[pos] = v; + return (V) oldValue; } + } - // stop if we find a free slot, or if we find the key itself. - // do skip over removed slots (yes, open addressing is like that...) - while (tab[i] != FREE && (tab[i] == REMOVED || !equalsMindTheNull(key, tab[i]))) { - i -= decrement; - //hashCollisions++; - if (i < 0) { - i += length; + @SuppressWarnings("unchecked") + public V getOrCompute(K k) { + int pos; + if (k == null) { + if (this.containsNullKey) { + return (V) this.value[this.n]; } + + this.containsNullKey = true; + pos = this.n; + } else { + Object[] key = this.key; + Object curr; + if ((curr = key[pos = mix(k.hashCode()) & this.mask]) != null) { + if (curr.equals(k)) { + return (V) this.value[pos]; + } + + while ((curr = key[pos = pos + 1 & this.mask]) != null) { + if (curr.equals(k)) { + return (V) this.value[pos]; + } + } + } + + key[pos] = k; + } + + Object v; + this.value[pos] = (v = compute(k)); + if (this.size == 0) { + this.first = this.last = pos; + this.link[pos] = -1L; + } else { + this.link[this.last] ^= (this.link[this.last] ^ (long) pos & 0xFFFFFFFFL) & 0xFFFFFFFFL; + this.link[pos] = ((long) this.last & 0xFFFFFFFFL) << 32 | 0xFFFFFFFFL; + this.last = pos; } - if (tab[i] == FREE) { - return -1; - } // not found - return i; //found, return index where key is contained + if (this.size++ >= this.maxFill) { + this.rehash(arraySize(this.size + 1, this.f)); + } + + return (V) v; } - /** - * @param value the value to be searched in the receiver. 
- * @return the index where the value is contained in the receiver, returns -1 if the value was not found. - */ - protected int indexOfValue(V value) { - Object[] val = values; + protected V compute(K k) { + throw new UnsupportedOperationException(); + } + + protected final void shiftKeys(int pos) { + Object[] key = this.key; + + label32: + while (true) { + int last = pos; + + Object curr; + for (pos = pos + 1 & this.mask; (curr = key[pos]) != null; pos = pos + 1 & this.mask) { + int slot = mix(curr.hashCode()) & this.mask; + if (last <= pos) { + if (last < slot && slot <= pos) { + continue; + } + } else if (last < slot || slot <= pos) { + continue; + } - for (int i = values.length; --i >= 0;) { - if (table[i] != FREE && table[i] != REMOVED && equalsMindTheNull(val[i], value)) { - return i; + key[last] = curr; + this.value[last] = this.value[pos]; + this.fixPointers(pos, last); + continue label32; } + + key[last] = null; + this.value[last] = null; + return; } + } - return -1; // not found + public V remove(Object k) { + if (k == null) { + return this.containsNullKey ? this.removeNullEntry() : this.defRetValue; + } else { + Object[] key = this.key; + Object curr; + int pos; + if ((curr = key[pos = mix(k.hashCode()) & this.mask]) == null) { + return this.defRetValue; + } else if (k.equals(curr)) { + return this.removeEntry(pos); + } else { + while ((curr = key[pos = pos + 1 & this.mask]) != null) { + if (k.equals(curr)) { + return this.removeEntry(pos); + } + } + + return this.defRetValue; + } + } } - /** - * Associates the given key with the given value. Replaces any old <tt>(key,someOtherValue)</tt> association, if - * existing. - * - * @param key the key the value shall be associated with. - * @param value the value to be associated. - * @return <tt>true</tt> if the receiver did not already contain such a key; <tt>false</tt> if the receiver did - * already contain such a key - the new value has now replaced the formerly associated value. 
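/*
 * Throughout the new code, iteration order is threaded through link[]:
 * each entry's long packs the index of the previous entry (in insertion
 * order) into the high 32 bits and the next entry into the low 32 bits,
 * with all-ones (-1) meaning "none". The masked-xor statements in the
 * patch update one half of the long in place; these helpers, illustrative
 * only, spell out the same operations without the bit tricks:
 */
final class LinkFieldSketch {
    static int prev(long[] link, int i) {
        return (int) (link[i] >>> 32);       // high half; -1 if none
    }

    static int next(long[] link, int i) {
        return (int) link[i];                // low half; -1 if none
    }

    static void setPrev(long[] link, int i, int prev) {
        link[i] = (link[i] & 0xFFFFFFFFL)
                | (((long) prev & 0xFFFFFFFFL) << 32);
    }

    static void setNext(long[] link, int i, int next) {
        link[i] = (link[i] & 0xFFFFFFFF00000000L)
                | ((long) next & 0xFFFFFFFFL);
    }
}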
- */ @SuppressWarnings("unchecked") - @Override - public V put(K key, V value) { - int i = indexOfInsertion(key); - if (i < 0) { //already contained - i = -i - 1; - V previous = (V) this.values[i]; - this.values[i] = value; - return previous; + private V setValue(int pos, V v) { + Object oldValue = this.value[pos]; + this.value[pos] = v; + return (V) oldValue; + } + + @SuppressWarnings("unchecked") + public V removeFirst() { + if (this.size == 0) { + throw new NoSuchElementException(); + } else { + int pos = this.first; + this.first = (int) this.link[pos]; + if (0 <= this.first) { + this.link[this.first] |= 0xFFFFFFFF00000000L; + } + + --this.size; + Object v = this.value[pos]; + if (pos == this.n) { + this.containsNullKey = false; + this.value[this.n] = null; + } else { + this.shiftKeys(pos); + } + + if (this.size < this.maxFill / 4 && this.n > 16) { + this.rehash(this.n / 2); + } + + return (V) v; } + } + + @SuppressWarnings("unchecked") + public V removeLast() { + if (this.size == 0) { + throw new NoSuchElementException(); + } else { + int pos = this.last; + this.last = (int) (this.link[pos] >>> 32); + if (0 <= this.last) { + this.link[this.last] |= 0xFFFFFFFFL; + } + + --this.size; + Object v = this.value[pos]; + if (pos == this.n) { + this.containsNullKey = false; + this.value[this.n] = null; + } else { + this.shiftKeys(pos); + } + + if (this.size < this.maxFill / 4 && this.n > 16) { + this.rehash(this.n / 2); + } - if (this.distinct > this.highWaterMark) { - int newCapacity = chooseGrowCapacity(this.distinct + 1, this.minLoadFactor, this.maxLoadFactor); - rehash(newCapacity); - return put(key, value); + return (V) v; } + } + + private void moveIndexToFirst(int i) { + if (this.size != 1 && this.first != i) { + if (this.last == i) { + this.last = (int) (this.link[i] >>> 32); + this.link[this.last] |= 0xFFFFFFFFL; + } else { + long linki = this.link[i]; + int prev = (int) (linki >>> 32); + int next = (int) linki; + this.link[prev] ^= (this.link[prev] ^ linki & 0xFFFFFFFFL) & 0xFFFFFFFFL; + this.link[next] ^= (this.link[next] ^ linki & 0xFFFFFFFF00000000L) & 0xFFFFFFFF00000000L; + } - if (this.table[i] == FREE) { - this.freeEntries--; + this.link[this.first] ^= (this.link[this.first] ^ ((long) i & 0xFFFFFFFFL) << 32) & 0xFFFFFFFF00000000L; + this.link[i] = 0xFFFFFFFF00000000L | (long) this.first & 0xFFFFFFFFL; + this.first = i; } - this.table[i] = key; - this.values[i] = value; - this.distinct++; + } - if (this.freeEntries < 1) { //delta - int newCapacity = chooseGrowCapacity(this.distinct + 1, this.minLoadFactor, this.maxLoadFactor); - rehash(newCapacity); + private void moveIndexToLast(int i) { + if (this.size != 1 && this.last != i) { + if (this.first == i) { + this.first = (int) this.link[i]; + this.link[this.first] |= 0xFFFFFFFF00000000L; + } else { + long linki = this.link[i]; + int prev = (int) (linki >>> 32); + int next = (int) linki; + this.link[prev] ^= (this.link[prev] ^ linki & 0xFFFFFFFFL) & 0xFFFFFFFFL; + this.link[next] ^= (this.link[next] ^ linki & 0xFFFFFFFF00000000L) & 0xFFFFFFFF00000000L; + } + + this.link[this.last] ^= (this.link[this.last] ^ (long) i & 0xFFFFFFFFL) & 0xFFFFFFFFL; + this.link[i] = ((long) this.last & 0xFFFFFFFFL) << 32 | 0xFFFFFFFFL; + this.last = i; } + } - return null; + @SuppressWarnings("unchecked") + public V getAndMoveToFirst(K k) { + if (k == null) { + if (this.containsNullKey) { + this.moveIndexToFirst(this.n); + return (V) this.value[this.n]; + } else { + return this.defRetValue; + } + } else { + Object[] key = this.key; + Object curr; + int 
pos; + if ((curr = key[pos = mix(k.hashCode()) & this.mask]) == null) { + return this.defRetValue; + } else if (k.equals(curr)) { + this.moveIndexToFirst(pos); + return (V) this.value[pos]; + } else { + while ((curr = key[pos = pos + 1 & this.mask]) != null) { + if (k.equals(curr)) { + this.moveIndexToFirst(pos); + return (V) this.value[pos]; + } + } + + return this.defRetValue; + } + } } - /** - * Rehashes the contents of the receiver into a new table with a smaller or larger capacity. This method is called - * automatically when the number of keys in the receiver exceeds the high water mark or falls below the low water - * mark. - */ @SuppressWarnings("unchecked") - protected void rehash(int newCapacity) { - int oldCapacity = table.length; - //if (oldCapacity == newCapacity) return; + public V getAndMoveToLast(K k) { + if (k == null) { + if (this.containsNullKey) { + this.moveIndexToLast(this.n); + return (V) this.value[this.n]; + } else { + return this.defRetValue; + } + } else { + Object[] key = this.key; + Object curr; + int pos; + if ((curr = key[pos = mix(k.hashCode()) & this.mask]) == null) { + return this.defRetValue; + } else if (k.equals(curr)) { + this.moveIndexToLast(pos); + return (V) this.value[pos]; + } else { + while ((curr = key[pos = pos + 1 & this.mask]) != null) { + if (k.equals(curr)) { + this.moveIndexToLast(pos); + return (V) this.value[pos]; + } + } + + return this.defRetValue; + } + } + } + + public V putAndMoveToFirst(K k, V v) { + int pos; + if (k == null) { + if (this.containsNullKey) { + this.moveIndexToFirst(this.n); + return this.setValue(this.n, v); + } + + this.containsNullKey = true; + pos = this.n; + } else { + Object[] key = this.key; + Object curr; + if ((curr = key[pos = mix(k.hashCode()) & this.mask]) != null) { + if (curr.equals(k)) { + this.moveIndexToFirst(pos); + return this.setValue(pos, v); + } + + while ((curr = key[pos = pos + 1 & this.mask]) != null) { + if (curr.equals(k)) { + this.moveIndexToFirst(pos); + return this.setValue(pos, v); + } + } + } + + key[pos] = k; + } + + this.value[pos] = v; + if (this.size == 0) { + this.first = this.last = pos; + this.link[pos] = -1L; + } else { + this.link[this.first] ^= (this.link[this.first] ^ ((long) pos & 0xFFFFFFFFL) << 32) & 0xFFFFFFFF00000000L; + this.link[pos] = 0xFFFFFFFF00000000L | (long) this.first & 0xFFFFFFFFL; + this.first = pos; + } - Object[] oldTable = table; - Object[] oldValues = values; + if (this.size++ >= this.maxFill) { + this.rehash(arraySize(this.size, this.f)); + } - Object[] newTable = new Object[newCapacity]; - Object[] newValues = new Object[newCapacity]; + return this.defRetValue; + } - this.lowWaterMark = chooseLowWaterMark(newCapacity, this.minLoadFactor); - this.highWaterMark = chooseHighWaterMark(newCapacity, this.maxLoadFactor); + public V putAndMoveToLast(K k, V v) { + int pos; + if (k == null) { + if (this.containsNullKey) { + this.moveIndexToLast(this.n); + return this.setValue(this.n, v); + } - this.table = newTable; - this.values = newValues; - this.freeEntries = newCapacity - this.distinct; // delta + this.containsNullKey = true; + pos = this.n; + } else { + Object[] key = this.key; + Object curr; + if ((curr = key[pos = mix(k.hashCode()) & this.mask]) != null) { + if (curr.equals(k)) { + this.moveIndexToLast(pos); + return this.setValue(pos, v); + } - for (int i = oldCapacity; i-- > 0;) { - if (oldTable[i] != FREE && oldTable[i] != REMOVED) { - Object element = oldTable[i]; - int index = indexOfInsertion((K)element); - newTable[index] = element; - 
newValues[index] = oldValues[i]; + while ((curr = key[pos = pos + 1 & this.mask]) != null) { + if (curr.equals(k)) { + this.moveIndexToLast(pos); + return this.setValue(pos, v); + } + } } + + key[pos] = k; + } + + this.value[pos] = v; + if (this.size == 0) { + this.first = this.last = pos; + this.link[pos] = -1L; + } else { + this.link[this.last] ^= (this.link[this.last] ^ (long) pos & 0xFFFFFFFFL) & 0xFFFFFFFFL; + this.link[pos] = ((long) this.last & 0xFFFFFFFFL) << 32 | 0xFFFFFFFFL; + this.last = pos; } + + if (this.size++ >= this.maxFill) { + this.rehash(arraySize(this.size, this.f)); + } + + return this.defRetValue; } - /** - * Removes the given key with its associated element from the receiver, if present. - * - * @param key the key to be removed from the receiver. - * @return <tt>true</tt> if the receiver contained the specified key, <tt>false</tt> otherwise. - */ @SuppressWarnings("unchecked") - @Override - public V remove(Object key) { - int i = indexOfKey((K)key); - if (i < 0) { - return null; + public V get(Object k) { + if (k == null) { + return containsNullKey ? (V) value[n] : defRetValue; } - // key not contained - V removed = (V) values[i]; - this.table[i] = REMOVED; - this.values[i] = null; - this.distinct--; + final Object[] key = this.key; + Object curr; + int pos; - if (this.distinct < this.lowWaterMark) { - int newCapacity = chooseShrinkCapacity(this.distinct, this.minLoadFactor, this.maxLoadFactor); - rehash(newCapacity); + // The starting point + if ((curr = key[pos = mix(k.hashCode()) & mask]) == null) { + return defRetValue; + } + if (k.equals(curr)) { + return (V) value[pos]; } - return removed; + // There's always an usused entry + while (true) { + if ((curr = key[pos = (pos + 1) & mask]) == null) { + return defRetValue; + } + if (k.equals(curr)) { + return (V) value[pos]; + } + } } - /** - * Initializes the receiver. - * - * @param initialCapacity the initial capacity of the receiver. - * @param minLoadFactor the minLoadFactor of the receiver. - * @param maxLoadFactor the maxLoadFactor of the receiver. - * @throws IllegalArgumentException if <tt>initialCapacity < 0 || (minLoadFactor < 0.0 || minLoadFactor >= 1.0) || - * (maxLoadFactor <= 0.0 || maxLoadFactor >= 1.0) || (minLoadFactor >= - * maxLoadFactor)</tt>. 
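/*
 * Deletion also changed strategy in this rewrite: the old code above parks
 * a REMOVED tombstone in the slot, while the new shiftKeys(pos) heals the
 * probe chain immediately, so lookups never have to skip dead slots. A
 * simplified mirror of shiftKeys on a bare Object[] table (power-of-two
 * length, linear probing), illustrative only -- the real method also moves
 * the matching value and repairs the link[] chain, and mixes the hash:
 */
final class ShiftKeysSketch {
    static void shiftKeys(Object[] key, int mask, int pos) {
        outer:
        while (true) {
            int last = pos;                          // the hole to fill
            Object curr;
            for (pos = (pos + 1) & mask; (curr = key[pos]) != null;
                    pos = (pos + 1) & mask) {
                int slot = curr.hashCode() & mask;   // entry's home slot
                // The entry may move back only if its home slot does NOT
                // lie cyclically within (last, pos]; otherwise moving it
                // would put it ahead of its home and break lookups.
                boolean movable = last <= pos
                        ? !(last < slot && slot <= pos)
                        : !(last < slot || slot <= pos);
                if (movable) {
                    key[last] = curr;    // pull the entry into the hole;
                    continue outer;      // its old slot is the new hole
                }
            }
            key[last] = null;            // reached a free slot: chain healed
            return;
        }
    }
}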
- */ - protected void setUp(int initialCapacity, double minLoadFactor, double maxLoadFactor) { - if (initialCapacity < 0) { - throw new IllegalArgumentException("Initial Capacity must not be less than zero: " + initialCapacity); + public boolean containsKey(Object k) { + if (k == null) { + return this.containsNullKey; + } else { + Object[] key = this.key; + Object curr; + int pos; + if ((curr = key[pos = mix(k.hashCode()) & this.mask]) == null) { + return false; + } else if (k.equals(curr)) { + return true; + } else { + while ((curr = key[pos = pos + 1 & this.mask]) != null) { + if (k.equals(curr)) { + return true; + } + } + + return false; + } } - if (minLoadFactor < 0.0 || minLoadFactor >= 1.0) { - throw new IllegalArgumentException("Illegal minLoadFactor: " + minLoadFactor); + } + + public boolean containsValue(Object v) { + Object[] value = this.value; + Object[] key = this.key; + if (containsNullKey && (value[n] == null && v == null) || value[n].equals(v)) { + return true; } - if (maxLoadFactor <= 0.0 || maxLoadFactor >= 1.0) { - throw new IllegalArgumentException("Illegal maxLoadFactor: " + maxLoadFactor); + for (int i = n; i-- != 0;) { + if (!(key[i] == null) && (value[i] == null && v == null) || value[i].equals(v)) { + return true; + } } - if (minLoadFactor >= maxLoadFactor) { - throw new IllegalArgumentException( - "Illegal minLoadFactor: " + minLoadFactor + " and maxLoadFactor: " + maxLoadFactor); + return false; + } + + public void clear() { + if (size != 0) { + size = 0; + containsNullKey = false; + Arrays.fill(key, (Object) null); + Arrays.fill(value, (Object) null); + first = last = -1; } - int capacity = initialCapacity; - capacity = nextPrime(capacity); - if (capacity == 0) { - capacity = 1; - } // open addressing needs at least one FREE slot at any time. + } - this.table = new Object[capacity]; - this.values = new Object[capacity]; + public int size() { + return this.size; + } + + public boolean isEmpty() { + return this.size == 0; + } - // memory will be exhausted long before this pathological case happens, anyway. 
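/*
 * Note on containsValue(...) above: '&&' binds tighter than '||', so as
 * written the null guards do not protect the equals() calls -- whenever a
 * slot's value is null (for instance when no null key is present), the
 * trailing value[n].equals(v) can throw NullPointerException. A sketch of
 * the apparent intent, following the ternary form used by the fastutil
 * code this class is based on (drop-in for the method above, using the
 * same fields):
 */
public boolean containsValue(Object v) {
    Object[] value = this.value;
    Object[] key = this.key;
    if (containsNullKey
            && (value[n] == null ? v == null : value[n].equals(v))) {
        return true;
    }
    for (int i = n; i-- != 0;) {
        if (key[i] != null
                && (value[i] == null ? v == null : value[i].equals(v))) {
            return true;
        }
    }
    return false;
}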
- this.minLoadFactor = minLoadFactor; - if (capacity == largestPrime) { - this.maxLoadFactor = 1.0; + protected void fixPointers(int i) { + if (size == 0) { + first = last = -1; + } else if (first == i) { + first = (int) link[i]; + if (0 <= first) { + link[first] |= 0xFFFFFFFF00000000L; + } + } else if (last == i) { + last = (int) (link[i] >>> 32); + if (0 <= last) { + link[last] |= 0xFFFFFFFFL; + } } else { - this.maxLoadFactor = maxLoadFactor; + long linki = link[i]; + int prev = (int) (linki >>> 32); + int next = (int) linki; + link[prev] ^= (link[prev] ^ linki & 0xFFFFFFFFL) & 0xFFFFFFFFL; + link[next] ^= (link[next] ^ linki & 0xFFFFFFFF00000000L) & 0xFFFFFFFF00000000L; } + } - this.distinct = 0; - this.freeEntries = capacity; // delta + protected void fixPointers(int s, int d) { + if (size == 1) { + first = last = d; + link[d] = -1L; + } else if (first == s) { + first = d; + link[(int) link[s]] ^= (link[(int) link[s]] ^ ((long) d & 0xFFFFFFFFL) << 32) & 0xFFFFFFFF00000000L; + link[d] = link[s]; + } else if (last == s) { + last = d; + link[(int) (link[s] >>> 32)] ^= (link[(int) (link[s] >>> 32)] ^ (long) d & 0xFFFFFFFFL) & 0xFFFFFFFFL; + link[d] = link[s]; + } else { + long links = link[s]; + int prev = (int) (links >>> 32); + int next = (int) links; + link[prev] ^= (link[prev] ^ (long) d & 0xFFFFFFFFL) & 0xFFFFFFFFL; + link[next] ^= (link[next] ^ ((long) d & 0xFFFFFFFFL) << 32) & 0xFFFFFFFF00000000L; + link[d] = links; + } + } - // lowWaterMark will be established upon first expansion. - // establishing it now (upon instance construction) would immediately make the table shrink upon first put(...). - // After all the idea of an "initialCapacity" implies violating lowWaterMarks when an object is young. - // See ensureCapacity(...) - this.lowWaterMark = 0; - this.highWaterMark = chooseHighWaterMark(capacity, this.maxLoadFactor); + @SuppressWarnings("unchecked") + public K firstKey() { + if (size == 0) { + throw new NoSuchElementException(); + } else { + return (K) key[first]; + } } - /** - * Trims the capacity of the receiver to be the receiver's current size. Releases any superfluous internal memory. An - * application can use this operation to minimize the storage of the receiver. - */ - public void trimToSize() { - // * 1.2 because open addressing's performance exponentially degrades beyond that point - // so that even rehashing the table can take very long - int newCapacity = nextPrime((int) (1 + 1.2 * size())); - if (table.length > newCapacity) { - rehash(newCapacity); + @SuppressWarnings("unchecked") + public K lastKey() { + if (size == 0) { + throw new NoSuchElementException(); + } else { + return (K) key[last]; + } + } + + public Comparator<? 
super K> comparator() { + return null; + } + + public SortedMap<K, V> tailMap(K from) { + throw new UnsupportedOperationException(); + } + + public SortedMap<K, V> headMap(K to) { + throw new UnsupportedOperationException(); + } + + public SortedMap<K, V> subMap(K from, K to) { + throw new UnsupportedOperationException(); + } + + public Iterable<Map.Entry<K, V>> fast() { + if (fast == null) { + fast = new Iterable<Entry<K, V>>() { + public Iterator<Entry<K, V>> iterator() { + return new FastEntryIterator(); + } + }; } + + return fast; } - public void concat() { - int newCap = nextPrime(size() + 1); // +1 as we always need a free slot - if (newCap != table.length) { - rehash(newCap); + public SortedSet<Map.Entry<K, V>> entrySet() { + if (entries == null) { + entries = new OpenHashMap.MapEntrySet(); } + + return this.entries; } - /** - * Allocate a set to contain Map.Entry objects for the pairs and return it. - */ - @Override - public Set<Entry<K,V>> entrySet() { - return new EntrySet(); + public SortedSet<K> keySet() { + if (keys == null) { + keys = new OpenHashMap.KeySet(); + } + + return keys; } - /** - * Chooses a new prime table capacity optimized for growing that (approximately) satisfies the invariant <tt>c * - * minLoadFactor <= size <= c * maxLoadFactor</tt> and has at least one FREE slot for the given size. - */ - protected int chooseGrowCapacity(int size, double minLoad, double maxLoad) { - return nextPrime(Math.max(size + 1, (int) ((4 * size / (3 * minLoad + maxLoad))))); + public Collection<V> values() { + if (values == null) { + values = new AbstractObjectCollection<V>() { + public Iterator<V> iterator() { + return new ValueIterator(); + } + + public int size() { + return size; + } + + public boolean contains(Object v) { + return containsValue(v); + } + + public void clear() { + OpenHashMap.this.clear(); + } + }; + } + + return values; } - /** - * Returns new high water mark threshold based on current capacity and maxLoadFactor. + /** Rehashes the map, making the table as small as possible. + * + * <P>This method rehashes the table to the smallest size satisfying the + * load factor. It can be used when the set will not be changed anymore, so + * to optimize access speed and size. * - * @return int the new threshold. + * <P>If the table size is already the minimum possible, this method + * does nothing. + * + * @return true if there was enough memory to trim the map. + * @see #trim(int) */ - protected int chooseHighWaterMark(int capacity, double maxLoad) { - return Math.min(capacity - 2, (int) (capacity * maxLoad)); //makes sure there is always at least one FREE slot + public boolean trim() { + int l = arraySize(size, f); + if (l >= n) { + return true; + } else { + try { + rehash(l); + return true; + } catch (OutOfMemoryError cantDoIt) { + return false; + } + } } - /** - * Returns new low water mark threshold based on current capacity and minLoadFactor. + /** Rehashes this map if the table is too large. * - * @return int the new threshold. + * <P>Let <var>N</var> be the smallest table size that can hold + * <code>max(n,{@link #size()})</code> entries, still satisfying the load factor. If the current + * table size is smaller than or equal to <var>N</var>, this method does + * nothing. Otherwise, it rehashes this map in a table of size + * <var>N</var>. + * + * <P>This method is useful when reusing maps. {@linkplain #clear() Clearing a + * map} leaves the table size untouched. 
If you are reusing a map + * many times, you can call this method with a typical + * size to avoid keeping around a very large table just + * because of a few large transient maps. + * + * @param n the threshold for the trimming. + * @return true if there was enough memory to trim the map. + * @see #trim() */ - protected int chooseLowWaterMark(int capacity, double minLoad) { - return (int) (capacity * minLoad); + public boolean trim(int n) { + int l = nextPowerOfTwo((int) Math.ceil((double) ((float) n / f))); + if (n <= l) { + return true; + } else { + try { + rehash(l); + return true; + } catch (OutOfMemoryError cantDoIt) { + return false; + } + } } - /** - * Chooses a new prime table capacity optimized for shrinking that (approximately) satisfies the invariant <tt>c * - * minLoadFactor <= size <= c * maxLoadFactor</tt> and has at least one FREE slot for the given size. + /** Rehashes the map. + * + * <P>This method implements the basic rehashing strategy, and may be + * overriden by subclasses implementing different rehashing strategies (e.g., + * disk-based rehashing). However, you should not override this method + * unless you understand the internal workings of this class. + * + * @param newN the new size */ - protected int chooseShrinkCapacity(int size, double minLoad, double maxLoad) { - return nextPrime(Math.max(size + 1, (int) ((4 * size / (minLoad + 3 * maxLoad))))); + protected void rehash(int newN) { + Object[] key = this.key; + Object[] value = this.value; + + int mask = newN - 1; + Object[] newKey = new Object[newN + 1]; + Object[] newValue = new Object[newN + 1]; + + int i = first, prev = -1, newPrev = -1, t, pos; + final long[] link = this.link; + final long[] newLink = new long[newN + 1]; + first = -1; + + for (int j = size; j-- != 0;) { + if (key[i] == null) { + pos = newN; + } else { + pos = mix(key[i].hashCode()) & mask; + while (newKey[pos] != null) { + pos = ( pos + 1 ) & mask; + } + newKey[pos] = key[i]; + } + + newValue[pos] = value[i]; + + if (prev != -1) { + newLink[newPrev] ^= (newLink[newPrev] ^ (long) pos & 0xFFFFFFFFL) & 0xFFFFFFFFL; + newLink[pos] ^= (newLink[pos] ^ ((long) newPrev & 0xFFFFFFFFL) << 32) & 0xFFFFFFFF00000000L; + newPrev = pos; + } else { + newPrev = first = pos; + newLink[pos] = -1L; + } + + t = i; + i = (int) link[i]; + prev = t; + } + + this.link = newLink; + this.last = newPrev; + if (newPrev != -1) { + newLink[newPrev] |= -1 & 0xFFFFFFFFL; + } + + n = newN; + this.mask = mask; + maxFill = maxFill(n, f); + this.key = newKey; + this.value = newValue; } - /** - * Returns a prime number which is <code>>= desiredCapacity</code> and very close to <code>desiredCapacity</code> - * (within 11% if <code>desiredCapacity >= 1000</code>). - * - * @param desiredCapacity the capacity desired by the user. - * @return the capacity which should be used for a hashtable. - */ - protected int nextPrime(int desiredCapacity) { - int i = java.util.Arrays.binarySearch(primeCapacities, desiredCapacity); - if (i < 0) { - // desired capacity not found, choose next prime greater than desired capacity - i = -i - 1; // remember the semantics of binarySearch... 
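/*
 * Note on trim(int n) above: the parameter shadows the table-size field
 * n, and l = nextPowerOfTwo(ceil(n / f)) is always >= n when f <= 1, so
 * the 'n <= l' test is always true and the method never rehashes. The
 * javadoc's contract compares the *current* table size against the
 * smallest table that can hold max(n, size()) entries; a sketch of that
 * intent (assumes the surrounding class's fields and helpers):
 */
public boolean trim(int n) {
    int needed = Math.max(n, size);
    int l = (int) nextPowerOfTwo((long) Math.ceil(needed / (double) f));
    if (this.n <= l) {
        return true;             // table is already this small; nothing to do
    }
    try {
        rehash(l);
        return true;
    } catch (OutOfMemoryError cantDoIt) {
        return false;
    }
}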
+ @SuppressWarnings("unchecked") + public OpenHashMap<K, V> clone() { + OpenHashMap<K, V> c; + try { + c = (OpenHashMap<K, V>) super.clone(); + } catch (CloneNotSupportedException cantHappen) { + throw new InternalError(); } - return primeCapacities[i]; + + c.fast = null; + c.keys = null; + c.values = null; + c.entries = null; + c.containsNullKey = containsNullKey; + c.key = key.clone(); + c.value = value.clone(); + c.link = link.clone(); + return c; } - /** - * Returns the number of (key,value) associations currently contained. - * - * @return the number of (key,value) associations currently contained. - */ - public int size() { - return distinct; + public int hashCode() { + int h = 0; + for( int j = realSize(), i = 0, t = 0; j-- != 0; ) { + while (key[i] == null) { + ++i; + } + + if (this != key[i]) { + t = key[i].hashCode(); + } + + if (this != value[i]) { + t ^= value[i] == null ? 0 : value[i].hashCode(); + } + + h += t; + i++; + } + + if (containsNullKey) { + h += value[n] == null ? 0 : value[n].hashCode(); + } + + return h; } - protected static boolean equalsMindTheNull(Object a, Object b) { - if (a == null && b == null) { - return true; + private void writeObject(ObjectOutputStream s) throws IOException { + Object[] key = this.key; + Object[] value = this.value; + OpenHashMap.MapIterator i = new OpenHashMap.MapIterator(null); + s.defaultWriteObject(); + int j = this.size; + + while (j-- != 0) { + int e = i.nextEntry(); + s.writeObject(key[e]); + s.writeObject(value[e]); } - if (a == null || b == null) { - return false; + + } + + private void readObject(ObjectInputStream s) throws IOException, ClassNotFoundException { + s.defaultReadObject(); + this.n = arraySize(this.size, this.f); + this.maxFill = maxFill(this.n, this.f); + this.mask = this.n - 1; + Object[] key = this.key = new Object[this.n + 1]; + Object[] value = this.value = new Object[this.n + 1]; + long[] link = this.link = new long[this.n + 1]; + int prev = -1; + this.first = this.last = -1; + int i = this.size; + + while (i-- != 0) { + Object k = s.readObject(); + Object v = s.readObject(); + int pos; + if (k == null) { + pos = this.n; + this.containsNullKey = true; + } else { + for (pos = mix(k.hashCode()) & this.mask; key[pos] != null; pos = pos + 1 & this.mask) { + ; + } + + key[pos] = k; + } + + value[pos] = v; + if (this.first != -1) { + link[prev] ^= (link[prev] ^ (long) pos & 0xFFFFFFFFL) & 0xFFFFFFFFL; + link[pos] ^= (link[pos] ^ ((long) prev & 0xFFFFFFFFL) << 32) & 0xFFFFFFFF00000000L; + prev = pos; + } else { + prev = this.first = pos; + link[pos] |= 0xFFFFFFFF00000000L; + } + } + + this.last = prev; + if (prev != -1) { + link[prev] |= 0xFFFFFFFFL; + } + + } + + private void checkTable() { + } + + private final class ValueIterator extends MapIterator implements Iterator<V> { + @SuppressWarnings("unchecked") + public V previous() { + return (V) value[this.previousEntry()]; + } + + public void set(V v) { + throw new UnsupportedOperationException(); + } + + public void add(V v) { + throw new UnsupportedOperationException(); + } + + public ValueIterator() { + super(); + } + + @SuppressWarnings("unchecked") + public V next() { + return (V) value[this.nextEntry()]; } - return a.equals(b); } - private class EntrySet extends AbstractSet<Entry<K, V>> { - @Override - public Iterator<Entry<K, V>> iterator() { - return new EntrySetIterator(); + private final class KeySet extends AbstractObjectSet<K> implements SortedSet<K> { + private KeySet() { + } + + public Iterator<K> iterator(K from) { + return new KeyIterator(from); + 
} + + public Iterator<K> iterator() { + return new KeyIterator(); } - @Override public int size() { - return OpenHashMap.this.size(); + return size; + } + + public boolean contains(Object k) { + return containsKey(k); + } + + public boolean remove(Object k) { + int oldSize = size; + OpenHashMap.this.remove(k); + return size != oldSize; + } + + public void clear() { + OpenHashMap.this.clear(); + } + + @SuppressWarnings("unchecked") + public K first() { + if (size == 0) { + throw new NoSuchElementException(); + } else { + return (K) key[first]; + } + } + + @SuppressWarnings("unchecked") + public K last() { + if (size == 0) { + throw new NoSuchElementException(); + } else { + return (K) key[last]; + } + } + + public Comparator<? super K> comparator() { + return null; + } + + public final SortedSet<K> tailSet(K from) { + throw new UnsupportedOperationException(); + } + + public final SortedSet<K> headSet(K to) { + throw new UnsupportedOperationException(); + } + + public final SortedSet<K> subSet(K from, K to) { + throw new UnsupportedOperationException(); } } - private class EntrySetIterator implements Iterator<Entry<K, V>> { - int idx = -1; - Entry<K,V> next; + private final class KeyIterator extends MapIterator implements Iterator<K> { + public KeyIterator(Object k) { + super(k); + } - EntrySetIterator() { - forward(); + @SuppressWarnings("unchecked") + public K previous() { + return (K) key[this.previousEntry()]; + } + + public void set(K k) { + throw new UnsupportedOperationException(); + } + + public void add(K k) { + throw new UnsupportedOperationException(); + } + + public KeyIterator() { + super(); } @SuppressWarnings("unchecked") - private void forward() { - next = null; - while (next == null && ++idx < table.length) { - if (table[idx] != FREE && table[idx] != REMOVED) { - next = new SimpleImmutableEntry<K, V>((K) table[idx], (V) values[idx]); + public K next() { + return (K) key[this.nextEntry()]; + } + } + + private final class MapEntrySet extends AbstractObjectSet<Entry<K, V>> implements SortedSet<Entry<K, V>> { + private MapEntrySet() { + } + + public EntryIterator iterator() { + return new EntryIterator(); + } + + public Comparator<? super Entry<K, V>> comparator() { + return null; + } + + public SortedSet<Entry<K, V>> subSet(Entry<K, V> fromElement, Entry<K, V> toElement) { + throw new UnsupportedOperationException(); + } + + public SortedSet<Entry<K, V>> headSet(Entry<K, V> toElement) { + throw new UnsupportedOperationException(); + } + + public SortedSet<Entry<K, V>> tailSet(Entry<K, V> fromElement) { + throw new UnsupportedOperationException(); + } + + public Entry<K, V> first() { + if (size == 0) { + throw new NoSuchElementException(); + } else { + return new MapEntry(first); + } + } + + public Entry<K, V> last() { + if (size == 0) { + throw new NoSuchElementException(); + } else { + return new MapEntry(last); + } + } + + public boolean contains(Object o) { + if (!(o instanceof java.util.Map.Entry)) { + return false; + } else { + java.util.Map.Entry e = (java.util.Map.Entry) o; + Object k = e.getKey(); + if (k == null) { + if (containsNullKey) { + if (value[n] == null) { + if (e.getValue() != null) { + return false; + } + } else if (!value[n].equals(e.getValue())) { + return false; + } + + return true; + } + + return false; + } else { + Object[] key = OpenHashMap.this.key; + Object curr; + int pos; + if ((curr = key[pos = mix(k.hashCode()) & mask]) == null) { + return false; + } else if (k.equals(curr)) { + return value[pos] == null ? 
e.getValue() == null : value[pos].equals(e.getValue()); + } else { + while ((curr = key[pos = pos + 1 & mask]) != null) { + if (k.equals(curr)) { + return value[pos] == null ? e.getValue() == null : value[pos].equals(e.getValue()); + } + } + + return false; + } + } + } + } + + public boolean remove(Object o) { + if (!(o instanceof java.util.Map.Entry)) { + return false; + } else { + java.util.Map.Entry e = (java.util.Map.Entry) o; + Object k = e.getKey(); + Object v = e.getValue(); + if (k == null) { + if (containsNullKey) { + if (value[n] == null) { + if (v != null) { + return false; + } + } else if (!value[n].equals(v)) { + return false; + } + + removeNullEntry(); + return true; + } else { + return false; + } + } else { + Object[] key = OpenHashMap.this.key; + Object curr; + int pos; + if ((curr = key[pos = mix(k.hashCode()) & mask]) == null) { + return false; + } else if (curr.equals(k)) { + if (value[pos] == null) { + if (v != null) { + return false; + } + } else if (!value[pos].equals(v)) { + return false; + } + + removeEntry(pos); + return true; + } else { + while (true) { + do { + if ((curr = key[pos = pos + 1 & mask]) == null) { + return false; + } + } while (!curr.equals(k)); + + if (value[pos] == null) { + if (v == null) { + break; + } + } else if (value[pos].equals(v)) { + break; + } + } + + removeEntry(pos); + return true; + } + } + } + } + + public int size() { + return size; + } + + public void clear() { + OpenHashMap.this.clear(); + } + + public EntryIterator iterator(Entry<K, V> from) { + return new EntryIterator(from.getKey()); + } + + public FastEntryIterator fastIterator() { + return new FastEntryIterator(); + } + + public FastEntryIterator fastIterator(Entry<K, V> from) { + return new FastEntryIterator(from.getKey()); + } + } + + private class FastEntryIterator extends MapIterator implements Iterator<Entry<K, V>> { + final MapEntry entry; + + public FastEntryIterator() { + super(); + this.entry = new MapEntry(); + } + + public FastEntryIterator(Object from) { + super(from); + this.entry = new MapEntry(); + } + + public OpenHashMap.MapEntry next() { + this.entry.index = this.nextEntry(); + return this.entry; + } + + public OpenHashMap.MapEntry previous() { + this.entry.index = this.previousEntry(); + return this.entry; + } + + public void set(Entry<K, V> ok) { + throw new UnsupportedOperationException(); + } + + public void add(Entry<K, V> ok) { + throw new UnsupportedOperationException(); + } + } + + private class EntryIterator extends MapIterator implements Iterator<Entry<K, V>> { + private OpenHashMap.MapEntry entry; + + public EntryIterator() { + super(); + } + + public EntryIterator(Object from) { + super(from); + } + + public OpenHashMap.MapEntry next() { + return this.entry = new MapEntry(this.nextEntry()); + } + + public OpenHashMap.MapEntry previous() { + return this.entry = new MapEntry(this.previousEntry()); + } + + public void remove() { + super.remove(); + this.entry.index = -1; + } + + public void set(Entry<K, V> ok) { + throw new UnsupportedOperationException(); + } + + public void add(Entry<K, V> ok) { + throw new UnsupportedOperationException(); + } + } + + public static abstract class AbstractObjectSet<K> extends AbstractObjectCollection<K> implements Cloneable { + public boolean equals(Object o) { + if (o == this) { + return true; + } else if (!(o instanceof Set)) { + return false; + } else { + Set s = (Set) o; + return s.size() == this.size() && this.containsAll(s); + } + } + + public int hashCode() { + int h = 0; + int n = this.size(); + + Object k; 
+ for (Iterator i = this.iterator(); n-- != 0; h += k == null ? 0 : k.hashCode()) {
+ k = i.next();
+ }
+
+ return h;
+ }
+ }
+
+ private class MapIterator {
+ /**
+ * The entry that will be returned by the next call to {@link java.util.ListIterator#previous()} (or -1 if no previous entry exists).
+ */
+ int prev = -1;
+ /**
+ * The entry that will be returned by the next call to {@link java.util.ListIterator#next()} (or -1 if no next entry exists).
+ */
+ int next = -1;
+ /**
+ * The last entry that was returned (or -1 if we did not iterate or used {@link java.util.Iterator#remove()}).
+ */
+ int curr = -1;
+ /**
+ * The current index (in the sense of a {@link java.util.ListIterator}). Note that this value is not meaningful when this iterator has been created using the nonempty constructor.
+ */
+ int index = -1;
+
+ private MapIterator() {
+ this.next = first;
+ this.index = 0;
+ }
+
+ private MapIterator(Object from) {
+ if (from == null) {
+ if (containsNullKey) {
+ this.next = (int) link[n];
+ this.prev = n;
+ } else {
+ throw new NoSuchElementException("The key " + from + " does not belong to this map.");
+ }
+ } else {
+ if (key[last] == null ? from == null : (key[last].equals(from))) {
+ this.prev = last;
+ this.index = size;
+ } else {
+ for (int pos = mix(from.hashCode()) & mask; key[pos] != null; pos = pos + 1 & mask) {
+ if (key[pos].equals(from)) {
+ this.next = (int) link[pos];
+ this.prev = pos;
+ return;
+ }
+ }
+ throw new NoSuchElementException("The key " + from + " does not belong to this map.");
 }
 }
 }
 public boolean hasNext() {
- return next != null;
+ return this.next != -1;
+ }
+
+ public boolean hasPrevious() {
+ return this.prev != -1;
+ }
+
+ private void ensureIndexKnown() {
+ if (index < 0) {
+ if (prev == -1) {
+ index = 0;
+ } else if (next == -1) {
+ index = size;
+ } else {
+ int pos = first;
+ for (index = 1; pos != prev; ++index) {
+ pos = (int) link[pos];
+ }
+ }
+ }
+ }
+
+ public int nextIndex() {
+ ensureIndexKnown();
+ return index;
+ }
+
+ public int previousIndex() {
+ ensureIndexKnown();
+ return index - 1;
 }
- public Entry<K, V> next() {
- if (next == null) {
+ public int nextEntry() {
+ if (!hasNext()) {
 throw new NoSuchElementException();
+ } else {
+ curr = next;
+ next = (int) link[curr];
+ prev = curr;
+ if (index >= 0) {
+ ++index;
+ }
+ return curr;
+ }
+ }
+
+ public int previousEntry() {
+ if (!hasPrevious()) {
+ throw new NoSuchElementException();
+ } else {
+ curr = prev;
+ prev = (int) (link[curr] >>> 32);
+ next = curr;
+ if (index >= 0) {
+ --index;
+ }
+ return curr;
 }
- Entry<K,V> n = next;
- forward();
- return n;
 }
 public void remove() {
+ this.ensureIndexKnown();
+ if (curr == -1) throw new IllegalStateException();
+
+ if (curr == prev) {
+ /* If the last operation was a next(), we are removing an entry that precedes
+ the current index, and thus we must decrement it. */
+ index--;
+ prev = (int) (link[curr] >>> 32);
+ } else {
+ next = (int) link[curr];
+ }
+
+ size--;
+ /* Now we manually fix the pointers. Because of our knowledge of next
+ and prev, this is going to be faster than calling fixPointers(). 
*/ + if (prev == -1) { + first = next; + } else { + link[prev] ^= (link[prev] ^ (long) next & 0xFFFFFFFFL) & 0xFFFFFFFFL; + } + if (next == -1) { + last = prev; + } else { + link[next] ^= (link[next] ^ ((long) prev & 0xFFFFFFFFL) << 32) & 0xFFFFFFFF00000000L; + } + + int last, slot, pos = curr; + curr = -1; + + if (pos == n) { + containsNullKey = false; + value[n] = null; + } else { + Object curr; + Object[] key = OpenHashMap.this.key; + // We have to horribly duplicate the shiftKeys() code because we need to update next/prev. + for (; ; ) { + pos = ((last = pos) + 1) & mask; + for (; ; ) { + if ((curr = key[pos]) == null) { + key[last] = null; + value[last] = null; + return; + } + slot = mix(curr.hashCode()) & mask; + if (last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos) break; + pos = (pos + 1) & mask; + } + key[last] = curr; + value[last] = value[pos]; + if (next == pos) next = last; + if (prev == pos) prev = last; + fixPointers(pos, last); + } + } + } + + public int skip(final int n) { + int i = n; + while (i-- != 0 && hasNext()) nextEntry(); + return n - i - 1; + } + + public int back(final int n) { + int i = n; + while (i-- != 0 && hasPrevious()) previousEntry(); + return n - i - 1; + } + } + + final class MapEntry implements Entry<K, V> { + int index; + + MapEntry(int index) { + this.index = index; + } + + MapEntry() { + } + + @SuppressWarnings("unchecked") + public K getKey() { + return (K) key[this.index]; + } + + @SuppressWarnings("unchecked") + public V getValue() { + return (V) value[this.index]; + } + + @SuppressWarnings("unchecked") + public V setValue(V v) { + Object oldValue = value[this.index]; + value[this.index] = v; + return (V) oldValue; + } + + public boolean equals(Object o) { + if (!(o instanceof Entry)) { + return false; + } else { + Entry e = (Entry) o; + if (key[this.index] == null) { + if (e.getKey() != null) { + return false; + } + } else if (!key[this.index].equals(e.getKey())) { + return false; + } + + if (value[this.index] == null) { + if (e.getValue() != null) { + return false; + } + } else if (!value[this.index].equals(e.getValue())) { + return false; + } + + return true; + } + } + + public int hashCode() { + return (key[this.index] == null ? 0 : + key[this.index].hashCode()) ^ (value[this.index] == null ? 0 : + value[this.index].hashCode()); + } + + public String toString() { + return key[this.index] + "=>" + value[this.index]; + } + } + + + public static abstract class AbstractObjectCollection<K> extends AbstractCollection<K> { + protected AbstractObjectCollection() { + } + + public Object[] toArray() { + Object[] a = new Object[this.size()]; + unwrap(this.iterator(), a); + return a; + } + + @SuppressWarnings("unchecked") + public <T> T[] toArray(T[] a) { + if (a.length < this.size()) { + a = (T[]) Array.newInstance(a.getClass().getComponentType(), this.size()); + } + unwrap(this.iterator(), a); + return a; + } + + public boolean addAll(Collection<? extends K> c) { + boolean retVal = false; + Iterator<? 
extends K> i = c.iterator(); + int n = c.size(); + + while (n-- != 0) { + if (this.add(i.next())) { + retVal = true; + } + } + + return retVal; + } + + public boolean add(K k) { throw new UnsupportedOperationException(); } + public boolean containsAll(Collection<?> c) { + int n = c.size(); + Iterator i = c.iterator(); + + do { + if (n-- == 0) { + return true; + } + } while (this.contains(i.next())); + + return false; + } + + public boolean retainAll(Collection<?> c) { + boolean retVal = false; + int n = this.size(); + Iterator i = this.iterator(); + + while (n-- != 0) { + if (!c.contains(i.next())) { + i.remove(); + retVal = true; + } + } + + return retVal; + } + + public boolean removeAll(Collection<?> c) { + boolean retVal = false; + int n = c.size(); + Iterator i = c.iterator(); + + while (n-- != 0) { + if (this.remove(i.next())) { + retVal = true; + } + } + + return retVal; + } + + public boolean isEmpty() { + return this.size() == 0; + } + + public String toString() { + StringBuilder s = new StringBuilder(); + Iterator i = this.iterator(); + int n = this.size(); + boolean first = true; + s.append("{"); + + while (n-- != 0) { + if (first) { + first = false; + } else { + s.append(", "); + } + + Object k = i.next(); + if (this == k) { + s.append("(this collection)"); + } else { + s.append(String.valueOf(k)); + } + } + + s.append("}"); + return s.toString(); + } + } + + private static int arraySize(int expected, float f) { + long s = Math.max(2L, nextPowerOfTwo((long) Math.ceil((double) ((float) expected / f)))); + if (s > 0x40000000L) { + throw new IllegalArgumentException("Too large (" + expected + " expected elements with load factor " + f + ")"); + } else { + return (int) s; + } + } + + private static int maxFill(int n, float f) { + return Math.min((int) Math.ceil((double) ((float) n * f)), n - 1); } + private static int nextPowerOfTwo(int x) { + if (x == 0) { + return 1; + } else { + --x; + x |= x >> 1; + x |= x >> 2; + x |= x >> 4; + x |= x >> 8; + return (x | x >> 16) + 1; + } + } + + private static long nextPowerOfTwo(long x) { + if (x == 0L) { + return 1L; + } else { + --x; + x |= x >> 1; + x |= x >> 2; + x |= x >> 4; + x |= x >> 8; + x |= x >> 16; + return (x | x >> 32) + 1L; + } + } + + private static int mix(int x) { + int h = x * -1640531527; + return h ^ h >>> 16; + } + + private static <K> int unwrap(Iterator<? extends K> i, K[] array, int offset, int max) { + if (max < 0) { + throw new IllegalArgumentException("The maximum number of elements (" + max + ") is negative"); + } else if (offset >= 0 && offset + max <= array.length) { + int j; + for (j = max; j-- != 0 && i.hasNext(); array[offset++] = i.next()) { + ; + } + + return max - j - 1; + } else { + throw new IllegalArgumentException(); + } + } + + private static <K> int unwrap(Iterator<? 
extends K> i, K[] array) {
+ return unwrap(i, array, 0, array.length);
+ }
 }
diff --git a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/OpenHashMapList.java b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/OpenHashMapList.java
index 54d7da15f..f314ed59a 100644..100755
--- a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/OpenHashMapList.java
+++ b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/OpenHashMapList.java
@@ -28,18 +28,21 @@ public class OpenHashMapList<K, V> extends OpenHashMap<K, CopyOnWriteList<V>> {
 super(initialCapacity);
 }
- public OpenHashMapList(int initialCapacity, double minLoadFactor, double maxLoadFactor) {
- super(initialCapacity, minLoadFactor, maxLoadFactor);
- }
-
+ @SuppressWarnings("unchecked")
 public OpenHashMapList<K, V> deepClone() {
 OpenHashMapList<K, V> copy = (OpenHashMapList<K, V>) super.clone();
- Object[] values = copy.values;
- for (int i = 0, l = values.length; i < l; i++) {
+ Object[] values = copy.value;
+ for (int i = values.length; i-- > 0;) {
 if (values[i] != null) {
 values[i] = new CopyOnWriteList<V>((CopyOnWriteList<V>) values[i]);
 }
 }
 return copy;
 }
+
+ @Override
+ protected CopyOnWriteList<V> compute(K key) {
+ return new CopyOnWriteList<V>();
+ }
+
 }
diff --git a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/OpenHashMapSet.java b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/OpenHashMapSet.java
index 60a1c985a..ff008a48f 100644..100755
--- a/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/OpenHashMapSet.java
+++ b/bundles/org.eclipse.osgi/felix/src/org/apache/felix/resolver/util/OpenHashMapSet.java
@@ -28,18 +28,20 @@ public class OpenHashMapSet<K, V> extends OpenHashMap<K, CopyOnWriteSet<V>> {
 super(initialCapacity);
 }
- public OpenHashMapSet(int initialCapacity, double minLoadFactor, double maxLoadFactor) {
- super(initialCapacity, minLoadFactor, maxLoadFactor);
- }
-
 public OpenHashMapSet<K, V> deepClone() {
 OpenHashMapSet<K, V> copy = (OpenHashMapSet<K, V>) super.clone();
- Object[] values = copy.values;
- for (int i = 0, l = values.length; i < l; i++) {
+ Object[] values = copy.value;
+ for (int i = values.length; i-- > 0;) {
 if (values[i] != null) {
 values[i] = new CopyOnWriteSet<V>((CopyOnWriteSet<V>) values[i]);
 }
 }
 return copy;
 }
+
+ @Override
+ protected CopyOnWriteSet<V> compute(K key) {
+ return new CopyOnWriteSet<V>();
+ }
+
 }
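
The trim() methods added above only ever shrink the backing table; clear() deliberately leaves the table size untouched. A minimal usage sketch, assuming OpenHashMap also exposes the standard java.util.Map operations (put, clear, size) that the resolver code relies on; the TrimDemo class itself is hypothetical:

    import org.apache.felix.resolver.util.OpenHashMap;

    public class TrimDemo {
        public static void main(String[] args) {
            OpenHashMap<String, Integer> map = new OpenHashMap<String, Integer>(1024);
            for (int i = 0; i < 10; i++) {
                map.put("key" + i, Integer.valueOf(i));
            }
            // trim() rehashes down to the smallest power-of-two table that
            // still satisfies the load factor; it never grows the table.
            map.trim();
            // clear() empties the map but keeps the table allocated, so a
            // reused map calls trim(n) with a typical size to release memory.
            map.clear();
            map.trim(16);
            System.out.println("entries: " + map.size());
        }
    }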
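
The link array that the iterators above walk packs each entry's insertion-order neighbours into a single long: the index of the previous entry in the high 32 bits and the next entry in the low 32 bits, with an all-ones half (reading as -1) marking either end of the list. A small decoding sketch under that reading; the helper names are illustrative, not part of OpenHashMap:

    // Illustrative helpers only; prevOf/nextOf are not OpenHashMap methods.
    final class LinkWords {
        // High 32 bits of link[i]: index of the previous entry in insertion
        // order; an all-ones half casts to -1, meaning "no previous entry".
        static int prevOf(long[] link, int i) {
            return (int) (link[i] >>> 32);
        }

        // Low 32 bits of link[i]: index of the next entry in insertion order.
        static int nextOf(long[] link, int i) {
            return (int) link[i];
        }

        // The masked-XOR idiom used in rehash() and remove(), e.g.
        //   link[p] ^= (link[p] ^ ((long) next & 0xFFFFFFFFL)) & 0xFFFFFFFFL;
        // rewrites only the low half of the word and leaves the high half intact.
    }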
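
OpenHashMapList and OpenHashMapSet each override the protected compute(K) hook so a fresh CopyOnWriteList or CopyOnWriteSet is created lazily for an absent key. The base-class call site is outside this patch, so the following get-or-create rendering is an assumption; ComputingMap and getOrCompute are invented names used only for illustration:

    import java.util.HashMap;

    // Illustrative rendering of the compute(K) hook; the real base-class
    // caller lives in OpenHashMap, outside this hunk.
    abstract class ComputingMap<K, V> extends HashMap<K, V> {
        protected abstract V compute(K key);  // factory for absent keys

        public V getOrCompute(K key) {        // hypothetical name
            V v = get(key);
            if (v == null) {
                v = compute(key);             // e.g. new CopyOnWriteList<V>()
                put(key, v);
            }
            return v;
        }
    }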