author      Stephan Herrmann    2016-12-08 18:48:20 +0000
committer   Stephan Herrmann    2016-12-08 18:48:20 +0000
commit      957a165b958c9aff0441520d8a2382d835970d78 (patch)
tree        3853a95b2811557ae8cdd0915a42dcdae0a0d255
parent      0b05eab816270a36246724d73dcccb884bf42c30 (diff)
download    org.eclipse.objectteams-957a165b958c9aff0441520d8a2382d835970d78.tar.gz
            org.eclipse.objectteams-957a165b958c9aff0441520d8a2382d835970d78.tar.xz
            org.eclipse.objectteams-957a165b958c9aff0441520d8a2382d835970d78.zip
update jdt.core to I20161208-0830
-rw-r--r--  org.eclipse.jdt.core.tests.compiler/src/org/eclipse/jdt/core/tests/compiler/regression/JavadocBugsTest.java  363
-rw-r--r--  org.eclipse.jdt.core.tests.compiler/src/org/eclipse/jdt/core/tests/compiler/regression/NullTypeAnnotationTest.java  34
-rw-r--r--  org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/dom/ASTConverterJavadocTest.java  132
-rw-r--r--  org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/DatabaseTest.java  37
-rw-r--r--  org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/DatabaseTestUtil.java  17
-rw-r--r--  org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/LargeBlockTest.java  335
-rw-r--r--  org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/RunIndexTests.java  1
-rw-r--r--  org.eclipse.jdt.core/batch/org/eclipse/jdt/internal/compiler/batch/Main.java  2
-rw-r--r--  org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/lookup/MethodBinding.java  8
-rw-r--r--  org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/parser/AbstractCommentParser.java  61
-rw-r--r--  org.eclipse.jdt.core/dom/org/eclipse/jdt/core/dom/DocCommentParser.java  12
-rw-r--r--  org.eclipse.jdt.core/model/org/eclipse/jdt/internal/core/JavaModelManager.java  35
-rw-r--r--  org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/Nd.java  4
-rw-r--r--  org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/RawGrowableArray.java  2
-rw-r--r--  org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/Database.java  620
-rw-r--r--  org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/LargeBlock.java  26
-rw-r--r--  org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/LongString.java  8
-rw-r--r--  org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/ShortString.java  2
-rw-r--r--  org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/indexer/ClassFileToIndexConverter.java  10
-rw-r--r--  org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/indexer/Indexer.java  29
-rw-r--r--  org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/java/FileFingerprint.java  9
-rw-r--r--  org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/java/JavaIndex.java  6
-rw-r--r--  org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/java/model/BinaryTypeFactory.java  37
23 files changed, 1600 insertions, 190 deletions
diff --git a/org.eclipse.jdt.core.tests.compiler/src/org/eclipse/jdt/core/tests/compiler/regression/JavadocBugsTest.java b/org.eclipse.jdt.core.tests.compiler/src/org/eclipse/jdt/core/tests/compiler/regression/JavadocBugsTest.java
index 3a96fa9ce..8b86b31bd 100644
--- a/org.eclipse.jdt.core.tests.compiler/src/org/eclipse/jdt/core/tests/compiler/regression/JavadocBugsTest.java
+++ b/org.eclipse.jdt.core.tests.compiler/src/org/eclipse/jdt/core/tests/compiler/regression/JavadocBugsTest.java
@@ -1,5 +1,5 @@
/*******************************************************************************
- * Copyright (c) 2000, 2014 IBM Corporation and others.
+ * Copyright (c) 2000, 2016 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
@@ -8881,5 +8881,366 @@ public void testBug382606() {
}
);
}
+
+/**
+ * @bug 206345: [javadoc] compiler should not interpret contents of {@literal}
+ * @see "https://bugs.eclipse.org/bugs/show_bug.cgi?id=206345"
+ */
+public void testBug206345a() {
+ // @literal tags display text without interpreting the text as HTML markup or nested javadoc tags
+ String[] units = new String[] {
+ "pkg/X.java",
+ "package pkg;\n" +
+ "\n" +
+ "public class X extends Object {\n" +
+ " /**\n" +
+ " * This is {@literal raw text:\n" +
+ " * {@link BadLink} is just text}\n" +
+ " * {@link expected_error}\n" +
+ " * }\n" +
+ " */\n" +
+ " public String toString() { \n" +
+ " return \"foo\";\n" +
+ " }\n" +
+ "}\n"
+ };
+ this.reportInvalidJavadoc = CompilerOptions.ERROR;
+ this.reportMissingJavadocDescription = CompilerOptions.ALL_STANDARD_TAGS;
+ runNegativeTest(units,
+ //warning - Tag @link: reference not found: expected_error
+ "----------\n" +
+ "1. ERROR in pkg\\X.java (at line 7)\r\n" +
+ " * {@link expected_error}\r\n" +
+ " ^^^^^^^^^^^^^^\n" +
+ "Javadoc: expected_error cannot be resolved to a type\n" +
+ "----------\n");
+}
+public void testBug206345b() {
+ // same for @code tags
+ String[] units = new String[] {
+ "pkg/X.java",
+ "package pkg;\n" +
+ "\n" +
+ "public class X extends Object {\n" +
+ " /**\n" +
+ " * This is {@code raw text:\n" +
+ " * {@link BadLink} is just text}\n" +
+ " * {@link expected_error}\n" +
+ " * }\n" +
+ " */\n" +
+ " public String toString() { \n" +
+ " return \"foo\";\n" +
+ " }\n" +
+ "}\n"
+ };
+ this.reportInvalidJavadoc = CompilerOptions.ERROR;
+ this.reportMissingJavadocDescription = CompilerOptions.ALL_STANDARD_TAGS;
+ runNegativeTest(units,
+ // warning - Tag @link: reference not found: expected_error
+ "----------\n" +
+ "1. ERROR in pkg\\X.java (at line 7)\r\n" +
+ " * {@link expected_error}\r\n" +
+ " ^^^^^^^^^^^^^^\n" +
+ "Javadoc: expected_error cannot be resolved to a type\n" +
+ "----------\n");
+}
+public void testBug206345c() {
+ // verify we still validate other syntax
+ String[] units = new String[] {
+ "pkg/X.java",
+ "package pkg;\n" +
+ "\n" +
+ "public class X extends Object {\n" +
+ " /**\n" +
+ " * This is {@link raw text:\n" +
+ " * {@link BadLink} is just text}\n" +
+ " * {@link expected_error}\n" +
+ " * }\n" +
+ " */\n" +
+ " public String toString() { \n" +
+ " return \"foo\";\n" +
+ " }\n" +
+ "}\n"
+ };
+ this.reportInvalidJavadoc = CompilerOptions.ERROR;
+ this.reportMissingJavadocDescription = CompilerOptions.ALL_STANDARD_TAGS;
+ runNegativeTest(units,
+ // warning - Tag @link: reference not found: raw text: {@link BadLink} is just text
+ // warning - Tag @link: reference not found: expected_error
+ //
+ "----------\n" +
+ "1. ERROR in pkg\\X.java (at line 5)\n" +
+ " * This is {@link raw text:\n" +
+ " ^^^^^^^^^^^^^^^^\n" +
+ "Javadoc: Missing closing brace for inline tag\n" +
+ "----------\n" +
+ "2. ERROR in pkg\\X.java (at line 5)\n" +
+ " * This is {@link raw text:\n" +
+ " ^^^\n" +
+ "Javadoc: raw cannot be resolved to a type\n" +
+ "----------\n" +
+ "3. ERROR in pkg\\X.java (at line 6)\n" +
+ " * {@link BadLink} is just text}\n" +
+ " ^^^^^^^\n" +
+ "Javadoc: BadLink cannot be resolved to a type\n" +
+ "----------\n" +
+ "4. ERROR in pkg\\X.java (at line 7)\n" +
+ " * {@link expected_error}\n" +
+ " ^^^^^^^^^^^^^^\n" +
+ "Javadoc: expected_error cannot be resolved to a type\n" +
+ "----------\n");
+}
+public void testBug206345d() {
+ String[] units = new String[] {
+ "pkg/X.java",
+ "package pkg;\n" +
+ "\n" +
+ "public class X extends Object {\n" +
+ " /**\n" +
+ " * This is {@literal raw text:\n" +
+ " * {@link BadLink}}}} is just text}\n" +
+ " * {@link expected_error}\n" +
+ " * }\n" +
+ " */\n" +
+ " public String toString() { \n" +
+ " return \"foo\";\n" +
+ " }\n" +
+ "}\n"
+ };
+ this.reportInvalidJavadoc = CompilerOptions.ERROR;
+ this.reportMissingJavadocDescription = CompilerOptions.ALL_STANDARD_TAGS;
+ runNegativeTest(units,
+ // warning - Tag @link: reference not found: expected_error
+ "----------\n" +
+ "1. ERROR in pkg\\X.java (at line 7)\n" +
+ " * {@link expected_error}\n" +
+ " ^^^^^^^^^^^^^^\n" +
+ "Javadoc: expected_error cannot be resolved to a type\n" +
+ "----------\n");
+}
+public void testBug206345e() {
+ String[] units = new String[] {
+ "pkg/X.java",
+ "package pkg;\n" +
+ "\n" +
+ "public class X extends Object {\n" +
+ " /**\n" +
+ " * This is {@code raw text:\n" +
+ " * {{{{{{@link BadLink}}} is just text}\n" +
+ " * @since 4.2\n" +
+ " */\n" +
+ " public String toString() { \n" +
+ " return \"foo\";\n" +
+ " }\n" +
+ "}\n"
+ };
+ this.reportInvalidJavadoc = CompilerOptions.ERROR;
+ this.reportMissingJavadocDescription = CompilerOptions.ALL_STANDARD_TAGS;
+ runNegativeTest(units,
+ // warning - End Delimiter } missing for possible See Tag in comment string: "This is {@code raw text: {{{{{{@link BadLink}}} is just text}"
+ "----------\n" +
+ "1. ERROR in pkg\\X.java (at line 5)\r\n" +
+ " * This is {@code raw text:\n" +
+ " * {{{{{{@link BadLink}}} is just text}\r\n" +
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n" +
+ "Javadoc: Missing closing brace for inline tag\n" +
+ "----------\n");
+}
+public void testBug206345f() {
+ String[] units = new String[] {
+ "pkg/X.java",
+ "package pkg;\n" +
+ "\n" +
+ "public class X extends Object {\n" +
+ " /**\n" +
+ " * This is {@code raw text:\n" +
+ " * {@link BadLink}\n" +
+ " * @since 4.2\n" +
+ " */\n" +
+ " public String toString() { \n" +
+ " return \"foo\";\n" +
+ " }\n" +
+ "}\n"
+ };
+ this.reportInvalidJavadoc = CompilerOptions.ERROR;
+ this.reportMissingJavadocDescription = CompilerOptions.ALL_STANDARD_TAGS;
+ runNegativeTest(units,
+ // warning - End Delimiter } missing for possible See Tag in comment string: "This is {@code raw text: {@link BadLink}"
+ "----------\n" +
+ "1. ERROR in pkg\\X.java (at line 5)\r\n" +
+ " * This is {@code raw text:\n" +
+ " * {@link BadLink}\r\n" +
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n" +
+ "Javadoc: Missing closing brace for inline tag\n" +
+ "----------\n");
+ }
+public void testBug206345g() {
+ String[] units = new String[] {
+ "pkg/X.java",
+ "package pkg;\n" +
+ "\n" +
+ "public class X extends Object {\n" +
+ " /**\n" +
+ " * This is {@code raw text:\n" +
+ " * {@link BadLink\n" +
+ " * @since 4.2\n" +
+ " */\n" +
+ " public String toString() { \n" +
+ " return \"foo\";\n" +
+ " }\n" +
+ "}\n"
+ };
+ this.reportInvalidJavadoc = CompilerOptions.ERROR;
+ this.reportMissingJavadocDescription = CompilerOptions.ALL_STANDARD_TAGS;
+ runNegativeTest(units,
+ "----------\n" +
+ "1. ERROR in pkg\\X.java (at line 5)\n" +
+ " * This is {@code raw text:\n" +
+ " ^^^^^^^^^^^^^^^^\n" +
+ "Javadoc: Missing closing brace for inline tag\n" +
+ "----------\n");
+}
+public void testBug206345h() {
+ String[] units = new String[] {
+ "pkg/X.java",
+ "package pkg;\n" +
+ "\n" +
+ "public class X extends Object {\n" +
+ " /**\n" +
+ " * This is {@code raw text:\n" +
+ " * @since 4.2\n" +
+ " */\n" +
+ " public String toString() { \n" +
+ " return \"foo\";\n" +
+ " }\n" +
+ "}\n"
+ };
+ this.reportInvalidJavadoc = CompilerOptions.ERROR;
+ this.reportMissingJavadocDescription = CompilerOptions.ALL_STANDARD_TAGS;
+ runNegativeTest(units,
+ "----------\n" +
+ "1. ERROR in pkg\\X.java (at line 5)\r\n" +
+ " * This is {@code raw text:\r\n" +
+ " ^^^^^^^^^^^^^^^^\n" +
+ "Javadoc: Missing closing brace for inline tag\n" +
+ "----------\n");
+}
+public void testBug206345i() {
+ String[] units = new String[] {
+ "pkg/X.java",
+ "package pkg;\n" +
+ "\n" +
+ "public class X extends Object {\n" +
+ " /**\n" +
+ " * This is {@code raw text:\n" +
+ " */\n" +
+ " public String toString() { \n" +
+ " return \"foo\";\n" +
+ " }\n" +
+ "}\n"
+ };
+ this.reportInvalidJavadoc = CompilerOptions.ERROR;
+ this.reportMissingJavadocDescription = CompilerOptions.ALL_STANDARD_TAGS;
+ runNegativeTest(units,
+ "----------\n" +
+ "1. ERROR in pkg\\X.java (at line 5)\r\n" +
+ " * This is {@code raw text:\r\n" +
+ " ^^^^^^^^^^^^^^^^\n" +
+ "Javadoc: Missing closing brace for inline tag\n" +
+ "----------\n");
+}
+public void testBug206345j() {
+ String[] units = new String[] {
+ "pkg/X.java",
+ "package pkg;\n" +
+ "\n" +
+ "public class X extends Object {\n" +
+ " /**\n" +
+ " * This is {@literal raw text:\n" +
+ " * {@link BadLink} is just text}\n" +
+ " */\n" +
+ " public String toString() { \n" +
+ " return \"foo\";\n" +
+ " }\n" +
+ "}\n"
+ };
+ this.reportInvalidJavadoc = CompilerOptions.ERROR;
+ this.reportMissingJavadocDescription = CompilerOptions.ALL_STANDARD_TAGS;
+ runConformReferenceTest(units);
+}
+public void testBug206345k() {
+ String[] units = new String[] {
+ "pkg/X.java",
+ "package pkg;\n" +
+ "\n" +
+ "public class X extends Object {\n" +
+ " /**\n" +
+ " * This is {@code raw text:\n" +
+ " * {@link BadLink} is just text}\n" +
+ " * }\n" +
+ " */\n" +
+ " public String toString() { \n" +
+ " return \"foo\";\n" +
+ " }\n" +
+ "}\n"
+ };
+ this.reportInvalidJavadoc = CompilerOptions.ERROR;
+ this.reportMissingJavadocDescription = CompilerOptions.ALL_STANDARD_TAGS;
+ runConformReferenceTest(units);
+}
+public void testBug206345l() {
+ String[] units = new String[] {
+ "pkg/X.java",
+ "package pkg;\n" +
+ "\n" +
+ "public class X extends Object {\n" +
+ " /**\n" +
+ " * This is {@literal raw text:\n" +
+ " * {@link BadLink}\n" +
+ " */\n" +
+ " public String toString() { \n" +
+ " return \"foo\";\n" +
+ " }\n" +
+ "}\n"
+ };
+ this.reportInvalidJavadoc = CompilerOptions.ERROR;
+ this.reportMissingJavadocDescription = CompilerOptions.ALL_STANDARD_TAGS;
+ runNegativeTest(units,
+ // warning - End Delimiter } missing for possible See Tag in comment string: "This is {@literal raw text: {@link BadLink}"
+ "----------\n" +
+ "1. ERROR in pkg\\X.java (at line 5)\n" +
+ " * This is {@literal raw text:\n" +
+ " * {@link BadLink}\n" +
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n" +
+ "Javadoc: Missing closing brace for inline tag\n" +
+ "----------\n");
+}
+public void testBug206345m() {
+ String[] units = new String[] {
+ "pkg/X.java",
+ "package pkg;\n" +
+ "\n" +
+ "public class X extends Object {\n" +
+ " /**\n" +
+ " * This is {@code raw text:\n" +
+ " * {@link BadLink}\n" +
+ " */\n" +
+ " public String toString() { \n" +
+ " return \"foo\";\n" +
+ " }\n" +
+ "}\n"
+ };
+ this.reportInvalidJavadoc = CompilerOptions.ERROR;
+ this.reportMissingJavadocDescription = CompilerOptions.ALL_STANDARD_TAGS;
+ runNegativeTest(units,
+ // warning - End Delimiter } missing for possible See Tag in comment string: "This is {@code raw text: {@link BadLink}"
+ "----------\n" +
+ "1. ERROR in pkg\\X.java (at line 5)\n" +
+ " * This is {@code raw text:\n" +
+ " * {@link BadLink}\n" +
+ " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n" +
+ "Javadoc: Missing closing brace for inline tag\n" +
+ "----------\n");
+}
}
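The tests above pin down the intended behaviour of bug 206345: text inside {@literal ...} and {@code ...} is left uninterpreted (so a nested {@link BadLink} is never resolved), references outside such a body are still checked, and an unterminated tag is reported as a missing closing brace. A minimal standalone illustration of the two cases, not taken from the patch (the package and class names below are made up):

package demo;

public class LiteralDemo {
	/**
	 * Everything up to the matching brace is plain text, so this bogus
	 * reference must not be resolved or reported by the compiler:
	 * {@literal {@link NoSuchType} is just text}
	 *
	 * This reference sits outside any literal/code body, so it is still
	 * expected to produce "NoSuchType cannot be resolved to a type":
	 * {@link NoSuchType}
	 */
	public String toString() {
		return "demo";
	}
}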
diff --git a/org.eclipse.jdt.core.tests.compiler/src/org/eclipse/jdt/core/tests/compiler/regression/NullTypeAnnotationTest.java b/org.eclipse.jdt.core.tests.compiler/src/org/eclipse/jdt/core/tests/compiler/regression/NullTypeAnnotationTest.java
index 9aebfa605..e02c76828 100644
--- a/org.eclipse.jdt.core.tests.compiler/src/org/eclipse/jdt/core/tests/compiler/regression/NullTypeAnnotationTest.java
+++ b/org.eclipse.jdt.core.tests.compiler/src/org/eclipse/jdt/core/tests/compiler/regression/NullTypeAnnotationTest.java
@@ -13358,6 +13358,40 @@ public void testBug501464() {
"----------\n"
);
}
+public void testBug507840() {
+ runConformTestWithLibs(
+ new String[] {
+ "nnbd_on_typevar/AtomicReference.java",
+ "package nnbd_on_typevar;\n" +
+ "\n" +
+ "import org.eclipse.jdt.annotation.NonNullByDefault;\n" +
+ "\n" +
+ "@NonNullByDefault\n" +
+ "class AtomicReference<T> {\n" +
+ " public void set(T t) {\n" +
+ " }\n" +
+ "}\n" +
+ "",
+ },
+ getCompilerOptions(),
+ ""
+ );
+ runConformTestWithLibs(
+ new String[] {
+ "nnbd_on_typevar/Usage.java",
+ "package nnbd_on_typevar;\n" +
+ "\n" +
+ "public class Usage {\n" +
+ " void m(AtomicReference<String> ref) {\n" +
+ " ref.set(null);\n" +
+ " }\n" +
+ "}\n" +
+ "",
+ },
+ getCompilerOptions(),
+ ""
+ );
+}
public void testBug508497() {
runConformTestWithLibs(
new String[] {
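testBug507840 above feeds the compiler an @NonNullByDefault class whose method parameter is a bare type variable and then expects no error when a client passes null. Condensed into a single hypothetical source file, assuming org.eclipse.jdt.annotation is on the classpath, the scenario looks like this:

package nnbd_on_typevar;

import org.eclipse.jdt.annotation.NonNullByDefault;

@NonNullByDefault
class AtomicReference<T> {
	public void set(T t) {     // T is a free type variable; the default does not turn it into @NonNull
	}
}

public class Usage {
	void m(AtomicReference<String> ref) {
		ref.set(null);         // accepted: the test expects this to compile without a null error
	}
}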
diff --git a/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/dom/ASTConverterJavadocTest.java b/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/dom/ASTConverterJavadocTest.java
index 98c170aa6..7a50cfa87 100644
--- a/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/dom/ASTConverterJavadocTest.java
+++ b/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/dom/ASTConverterJavadocTest.java
@@ -1,5 +1,5 @@
/*******************************************************************************
- * Copyright (c) 2000, 2015 IBM Corporation and others.
+ * Copyright (c) 2000, 2016 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
@@ -117,6 +117,9 @@ public class ASTConverterJavadocTest extends ConverterTestSetup {
private String chars;
// List of tags contained in each comment read from test source.
List allTags = new ArrayList();
+ // tags inhibiting inline tags
+ static final String TAG_CODE = "code";
+ static final String TAG_LITERAL = "literal";
// Current compilation unit
protected ICompilationUnit sourceUnit;
// Test package binding
@@ -342,6 +345,12 @@ public class ASTConverterJavadocTest extends ConverterTestSetup {
String tag = null;
List tags = new ArrayList();
int length = source.length;
+ // https://bugs.eclipse.org/bugs/show_bug.cgi?id=206345
+ // when parsing tags such as @code and @literal,
+ // any tag should be discarded and considered as plain text until
+ // properly closed with closing brace
+ boolean considerTagAsPlainText = false;
+ int openingBraces = 0;
char previousChar=0, currentChar=0;
for (int i=0; i<length;) {
previousChar = currentChar;
@@ -440,6 +449,7 @@ public class ASTConverterJavadocTest extends ConverterTestSetup {
if (currentChar >= 'a' && currentChar <= 'z') {
tag += currentChar;
} else {
+ if (tag.equalsIgnoreCase(TAG_LITERAL) || tag.equalsIgnoreCase(TAG_CODE)) considerTagAsPlainText = true;
tags.add(tag);
tag = null;
}
@@ -447,9 +457,17 @@ public class ASTConverterJavadocTest extends ConverterTestSetup {
// Some characters are special in javadoc comments
switch (currentChar) {
case '@':
- if (!lineStarted || previousChar == '{') {
+ if (!lineStarted) {
tag = "";
lineStarted = true;
+ } else if (previousChar == '{') {
+ // https://bugs.eclipse.org/bugs/show_bug.cgi?id=206345
+ if (considerTagAsPlainText) {
+ openingBraces++;
+ } else {
+ tag = "";
+ lineStarted = true;
+ }
}
break;
case '\r':
@@ -458,6 +476,16 @@ public class ASTConverterJavadocTest extends ConverterTestSetup {
break;
case '*':
break;
+ case '}':
+ // https://bugs.eclipse.org/bugs/show_bug.cgi?id=206345
+ if (considerTagAsPlainText) {
+ if (openingBraces > 0) {
+ openingBraces--;
+ } else {
+ considerTagAsPlainText = false;
+ }
+ }
+ break;
default:
if (!Character.isWhitespace(currentChar)) {
lineStarted = true;
@@ -928,7 +956,8 @@ public class ASTConverterJavadocTest extends ConverterTestSetup {
tagStart += fragment.getLength();
previousFragment = fragment;
}
- if (tagElement.isNested()) {
+ // https://bugs.eclipse.org/bugs/show_bug.cgi?id=206345
+ if (!(TAG_CODE.equalsIgnoreCase(tagName) || !TAG_LITERAL.equalsIgnoreCase(tagName)) && tagElement.isNested()) {
assumeEquals(this.prefix+"Wrong end character at <"+tagStart+"> for "+tagElement, '}', source[tagStart++]);
}
}
@@ -3414,4 +3443,101 @@ public class ASTConverterJavadocTest extends ConverterTestSetup {
CompilationUnit unit = (CompilationUnit) runConversion(getJLS3(), this.workingCopies[0], true);
assumeEquals(this.prefix+"Wrong number of comments", 1, unit.getCommentList().size());
}
+
+ /**
+ * @bug 206345: [javadoc] compiler should not interpret contents of {@literal}
+ * @see "https://bugs.eclipse.org/bugs/show_bug.cgi?id=206345"
+ * @deprecated
+ */
+ public void testBug206345a() throws JavaModelException {
+ this.workingCopies = new ICompilationUnit[1];
+ this.astLevel = AST.JLS3;
+ this.workingCopies[0] = getWorkingCopy("/Converter15/src/javadoc/b206345/X.java",
+ "package javadoc.b206345;\n" +
+ "\n" +
+ "public class X extends Object {\n" +
+ " /**\n" +
+ " * This is {@literal raw text:\n" +
+ " * {@link BadLink} is just text}\n" +
+ " */\n" +
+ " public String toString() { \n" +
+ " return \"foo\";\n" +
+ " }\n" +
+ "}\n"
+ );
+ CompilationUnit compilUnit = (CompilationUnit) runConversion(this.workingCopies[0], true);
+ verifyWorkingCopiesComments();
+ if (this.docCommentSupport.equals(JavaCore.ENABLED)) {
+ // Verify comment type
+ List unitComments = compilUnit.getCommentList();
+ assertEquals("Wrong number of comments", 1, unitComments.size());
+ Comment comment = (Comment) unitComments.get(0);
+ assertEquals("Comment should be javadoc", comment.getNodeType(), ASTNode.JAVADOC);
+ Javadoc docComment = (Javadoc) compilUnit.getCommentList().get(0);
+ assumeEquals(this.prefix+"Wrong number of tags", 1, docComment.tags().size());
+ TagElement tagElement = (TagElement) docComment.tags().get(0);
+ assumeNull(this.prefix+"Wrong type of tag ["+tagElement+"]", tagElement.getTagName());
+ assumeEquals(this.prefix+"Wrong number of fragments in tag ["+tagElement+"]", 3, tagElement.fragments().size());
+ ASTNode fragment = (ASTNode) tagElement.fragments().get(0);
+ assumeEquals(this.prefix+"Invalid type for fragment ["+fragment+"]", ASTNode.TEXT_ELEMENT, fragment.getNodeType());
+ fragment = (ASTNode) tagElement.fragments().get(1);
+ assumeEquals(this.prefix+"Invalid type for fragment ["+fragment+"]", ASTNode.TAG_ELEMENT, fragment.getNodeType());
+ TagElement inlineTag = (TagElement) fragment;
+ assumeEquals(this.prefix+"Wrong number of fragments in tag ["+inlineTag+"]", 1, inlineTag.fragments().size());
+ fragment = (ASTNode) inlineTag.fragments().get(0);
+ assumeEquals(this.prefix+"Invalid type for fragment ["+fragment+"]", ASTNode.TEXT_ELEMENT, fragment.getNodeType());
+ fragment = (ASTNode) tagElement.fragments().get(2);
+ assumeEquals(this.prefix+"Invalid type for fragment ["+fragment+"]", ASTNode.TEXT_ELEMENT, fragment.getNodeType());
+ TextElement textElement = (TextElement) fragment;
+ assumeEquals(this.prefix+"Invalid content for text element ", "{@link BadLink} is just text}", textElement.getText());
+ }
+ }
+ /**
+ *
+ * @throws JavaModelException
+ * @deprecated
+ */
+ public void testBug206345b() throws JavaModelException {
+ this.workingCopies = new ICompilationUnit[1];
+ this.astLevel = AST.JLS3;
+ this.workingCopies[0] = getWorkingCopy("/Converter15/src/javadoc/b206345/X.java",
+ "package javadoc.b206345;\n" +
+ "\n" +
+ "public class X extends Object {\n" +
+ " /**\n" +
+ " * This is {@code raw text:\n" +
+ " * {@link BadLink} is just text}\n" +
+ " */\n" +
+ " public String toString() { \n" +
+ " return \"foo\";\n" +
+ " }\n" +
+ "}\n"
+ );
+ CompilationUnit compilUnit = (CompilationUnit) runConversion(this.workingCopies[0], true);
+ verifyWorkingCopiesComments();
+ if (this.docCommentSupport.equals(JavaCore.ENABLED)) {
+ // Verify comment type
+ List unitComments = compilUnit.getCommentList();
+ assertEquals("Wrong number of comments", 1, unitComments.size());
+ Comment comment = (Comment) unitComments.get(0);
+ assertEquals("Comment should be javadoc", comment.getNodeType(), ASTNode.JAVADOC);
+ Javadoc docComment = (Javadoc) compilUnit.getCommentList().get(0);
+ assumeEquals(this.prefix+"Wrong number of tags", 1, docComment.tags().size());
+ TagElement tagElement = (TagElement) docComment.tags().get(0);
+ assumeNull(this.prefix+"Wrong type of tag ["+tagElement+"]", tagElement.getTagName());
+ assumeEquals(this.prefix+"Wrong number of fragments in tag ["+tagElement+"]", 3, tagElement.fragments().size());
+ ASTNode fragment = (ASTNode) tagElement.fragments().get(0);
+ assumeEquals(this.prefix+"Invalid type for fragment ["+fragment+"]", ASTNode.TEXT_ELEMENT, fragment.getNodeType());
+ fragment = (ASTNode) tagElement.fragments().get(1);
+ assumeEquals(this.prefix+"Invalid type for fragment ["+fragment+"]", ASTNode.TAG_ELEMENT, fragment.getNodeType());
+ TagElement inlineTag = (TagElement) fragment;
+ assumeEquals(this.prefix+"Wrong number of fragments in tag ["+inlineTag+"]", 1, inlineTag.fragments().size());
+ fragment = (ASTNode) inlineTag.fragments().get(0);
+ assumeEquals(this.prefix+"Invalid type for fragment ["+fragment+"]", ASTNode.TEXT_ELEMENT, fragment.getNodeType());
+ fragment = (ASTNode) tagElement.fragments().get(2);
+ assumeEquals(this.prefix+"Invalid type for fragment ["+fragment+"]", ASTNode.TEXT_ELEMENT, fragment.getNodeType());
+ TextElement textElement = (TextElement) fragment;
+ assumeEquals(this.prefix+"Invalid content for text element ", "{@link BadLink} is just text}", textElement.getText());
+ }
+ }
}
diff --git a/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/DatabaseTest.java b/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/DatabaseTest.java
index 9dd4557c4..3d770f95e 100644
--- a/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/DatabaseTest.java
+++ b/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/DatabaseTest.java
@@ -18,8 +18,6 @@ import java.util.Random;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.jdt.core.tests.nd.util.BaseTestCase;
import org.eclipse.jdt.internal.core.nd.Nd;
-import org.eclipse.jdt.internal.core.nd.NdNode;
-import org.eclipse.jdt.internal.core.nd.NdNodeTypeRegistry;
import org.eclipse.jdt.internal.core.nd.db.BTree;
import org.eclipse.jdt.internal.core.nd.db.ChunkCache;
import org.eclipse.jdt.internal.core.nd.db.Database;
@@ -40,22 +38,18 @@ public class DatabaseTest extends BaseTestCase {
private static final long TEST_OFFSET = 0;
private Nd nd;
protected Database db;
- private static final int CURRENT_VERSION = 10;
-
@Override
protected void setUp() throws Exception {
super.setUp();
String testName = getName();
- NdNodeTypeRegistry<NdNode> registry = new NdNodeTypeRegistry<>();
- this.nd = new Nd(DatabaseTestUtil.getTempDbName(testName), new ChunkCache(), registry,
- 0, 100, CURRENT_VERSION);
+ this.nd = DatabaseTestUtil.createWithoutNodeRegistry(testName);
this.db = this.nd.getDB();
this.db.setExclusiveLock();
// Allocate all database chunks up to TEST_OFFSET.
int count = 0;
for (long offset = 0; offset < TEST_OFFSET;) {
- offset = this.db.malloc(Database.MAX_MALLOC_SIZE, Database.POOL_MISC);
+ offset = this.db.malloc(Database.MAX_SINGLE_BLOCK_MALLOC_SIZE, Database.POOL_MISC);
if (++count >= 1000) {
this.db.flush();
count = 0;
@@ -70,27 +64,34 @@ public class DatabaseTest extends BaseTestCase {
@Override
protected void tearDown() throws Exception {
- this.db.close();
- if (!this.db.getLocation().delete()) {
- this.db.getLocation().deleteOnExit();
- }
- this.db= null;
+ DatabaseTestUtil.deleteDatabase(this.db);
+ this.db = null;
+ }
+
+ public void testBytesNeededForChunks() throws Exception {
+ int numChunks = 10;
+ long bytes = Database.getBytesThatFitInChunks(numChunks);
+ int measuredChunks = Database.getChunksNeededForBytes(bytes);
+ assertEquals(numChunks, measuredChunks);
}
public void testBlockSizeAndFirstBlock() throws Exception {
- assertEquals(CURRENT_VERSION, this.db.getVersion());
+ assertEquals(DatabaseTestUtil.CURRENT_VERSION, this.db.getVersion());
final int realsize = 42;
- final int deltas = (realsize + Database.BLOCK_HEADER_SIZE + Database.BLOCK_SIZE_DELTA - 1) / Database.BLOCK_SIZE_DELTA;
+ final int deltas = (realsize + Database.BLOCK_HEADER_SIZE + Database.BLOCK_SIZE_DELTA - 1)
+ / Database.BLOCK_SIZE_DELTA;
final int blocksize = deltas * Database.BLOCK_SIZE_DELTA;
- final int freeDeltas= Database.CHUNK_SIZE / Database.BLOCK_SIZE_DELTA - deltas;
+ final int freeDeltas = Database.MAX_BLOCK_DELTAS - deltas;
long mem = this.db.malloc(realsize, Database.POOL_MISC);
assertEquals(-blocksize, this.db.getShort(mem - Database.BLOCK_HEADER_SIZE));
this.db.free(mem, Database.POOL_MISC);
assertEquals(blocksize, this.db.getShort(mem - Database.BLOCK_HEADER_SIZE));
- assertEquals(mem, this.db.getRecPtr((deltas - Database.MIN_BLOCK_DELTAS +1 ) * Database.INT_SIZE));
- assertEquals(mem + blocksize, this.db.getRecPtr((freeDeltas - Database.MIN_BLOCK_DELTAS + 1) * Database.INT_SIZE));
+ assertEquals(mem, this.db
+ .getRecPtr((deltas - Database.MIN_BLOCK_DELTAS) * Database.PTR_SIZE + Database.MALLOC_TABLE_OFFSET));
+ assertEquals(mem + blocksize, this.db.getRecPtr(
+ (freeDeltas - Database.MIN_BLOCK_DELTAS) * Database.PTR_SIZE + Database.MALLOC_TABLE_OFFSET));
}
public void testBug192437() throws Exception {
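The reworked assertions in testBlockSizeAndFirstBlock round a requested size up to a multiple of BLOCK_SIZE_DELTA (after adding the block header) and then locate the matching free-list slot relative to the now-public MALLOC_TABLE_OFFSET, using PTR_SIZE entries and without the old "+1" adjustment. A small sketch of just that arithmetic; the constants are passed in as parameters because their concrete values are not part of this excerpt:

// Illustration only: mirrors the formulas asserted in testBlockSizeAndFirstBlock.
final class BlockMathSketch {
	// Size actually reserved for a malloc of realSize bytes.
	static int blockSizeFor(int realSize, int blockHeaderSize, int blockSizeDelta) {
		int deltas = (realSize + blockHeaderSize + blockSizeDelta - 1) / blockSizeDelta; // round up
		return deltas * blockSizeDelta;
	}

	// Offset of the free-list head pointer for blocks of the given delta count.
	static int freeListSlotOffset(int deltas, int minBlockDeltas, int ptrSize, int mallocTableOffset) {
		return (deltas - minBlockDeltas) * ptrSize + mallocTableOffset;
	}
}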
diff --git a/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/DatabaseTestUtil.java b/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/DatabaseTestUtil.java
index 30040db2b..d4bfd7366 100644
--- a/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/DatabaseTestUtil.java
+++ b/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/DatabaseTestUtil.java
@@ -19,6 +19,7 @@ import org.eclipse.jdt.internal.core.nd.Nd;
import org.eclipse.jdt.internal.core.nd.NdNode;
import org.eclipse.jdt.internal.core.nd.NdNodeTypeRegistry;
import org.eclipse.jdt.internal.core.nd.db.ChunkCache;
+import org.eclipse.jdt.internal.core.nd.db.Database;
/**
*
@@ -54,4 +55,20 @@ public class DatabaseTestUtil {
public static Nd createEmptyNd(String testName, NdNodeTypeRegistry<NdNode> registry) {
return new Nd(DatabaseTestUtil.getTempDbName(testName), new ChunkCache(), registry, 0, 0, 0);
}
+
+ static Nd createWithoutNodeRegistry(String testName) {
+ NdNodeTypeRegistry<NdNode> registry = new NdNodeTypeRegistry<>();
+ Nd tempNd = new Nd(getTempDbName(testName), new ChunkCache(), registry, 0, 100,
+ DatabaseTestUtil.CURRENT_VERSION);
+ return tempNd;
+ }
+
+ static final int CURRENT_VERSION = 10;
+
+ static void deleteDatabase(Database db) {
+ db.close();
+ if (!db.getLocation().delete()) {
+ db.getLocation().deleteOnExit();
+ }
+ }
}
diff --git a/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/LargeBlockTest.java b/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/LargeBlockTest.java
new file mode 100644
index 000000000..674c709c5
--- /dev/null
+++ b/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/LargeBlockTest.java
@@ -0,0 +1,335 @@
+/*******************************************************************************
+ * Copyright (c) 2005, 2016 QNX Software Systems and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Public License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/legal/epl-v10.html
+ *
+ * Contributors:
+ * QNX Software Systems - initial API and implementation
+ * Andrew Ferguson (Symbian)
+ * Markus Schorn (Wind River Systems)
+ *******************************************************************************/
+package org.eclipse.jdt.core.tests.nd;
+
+import org.eclipse.jdt.core.tests.nd.util.BaseTestCase;
+import org.eclipse.jdt.internal.core.nd.Nd;
+import org.eclipse.jdt.internal.core.nd.db.Database;
+
+import junit.framework.Test;
+
+/**
+ * Tests for the {@link Database} class.
+ */
+public class LargeBlockTest extends BaseTestCase {
+ private Nd nd;
+ protected Database db;
+
+ @Override
+ protected void setUp() throws Exception {
+ super.setUp();
+ String testName = getName();
+ this.nd = DatabaseTestUtil.createWithoutNodeRegistry(testName);
+ this.db = this.nd.getDB();
+ this.db.setExclusiveLock();
+ this.db.flush();
+ }
+
+ public static Test suite() {
+ return BaseTestCase.suite(LargeBlockTest.class);
+ }
+
+ @Override
+ protected void tearDown() throws Exception {
+ DatabaseTestUtil.deleteDatabase(this.db);
+ this.db = null;
+ }
+
+ private long mallocChunks(int chunks) {
+ return malloc(Database.getBytesThatFitInChunks(chunks));
+ }
+
+ private long malloc(long bytes) {
+ return this.db.malloc(bytes, Database.POOL_MISC);
+ }
+
+ private void free(long address) {
+ this.db.free(address, Database.POOL_MISC);
+ }
+
+ /**
+ * Allocate the maximum number of bytes that can fit in 3 chunks and verify
+ * that it doesn't overflow.
+ */
+ public void testAllocationThatFillsMultipleChunksDoesntOverflow() throws Exception {
+ int chunkCount = this.db.getChunkCount();
+
+ int numChunks = 5;
+ mallocChunks(numChunks);
+
+ assertEquals("The database should not allocate more (or less) memory than is needed", numChunks + chunkCount,
+ this.db.getChunkCount());
+ }
+
+ /**
+ * Allocates a few blocks, frees them, then allocates more blocks. Verifies
+ * that the database reuses the chunks from the first allocation when it
+ * tries to allocate the larger block later.
+ */
+ public void testLastChunkIsReused() throws Exception {
+ int chunkCount = this.db.getChunkCount();
+
+ int numChunks = 10;
+ long temporaryBlockAddress = mallocChunks(3);
+ free(temporaryBlockAddress);
+ mallocChunks(numChunks);
+
+ assertEquals("If the last chunk is free, it should be resized if necessary when a new chunk is requested",
+ numChunks + chunkCount, this.db.getChunkCount());
+ }
+
+ /**
+ * Tests that if there is a single large free block available, that block
+ * will be split and reused if necessary to satisfy a number of smaller
+ * requests.
+ *
+ * @throws Exception
+ */
+ public void testLargeAllocationIsSplitAndReused() throws Exception {
+ long tempAddress = malloc(Database.getBytesThatFitInChunks(10));
+ // Use some space at the end of the database to prevent the allocator
+ // from using the end of the database, where stuff can be easily resized
+ mallocChunks(1);
+ free(tempAddress);
+
+ // Keep track of how much memory we are currently using, so we can
+ // ensure that any further allocations come from the freed block rather
+ // than the end of the database.
+ int chunkCount = this.db.getChunkCount();
+
+ long firstAllocation = mallocChunks(7);
+
+ assertEquals("The freed chunk should be reused (there should be 10 chunks available)", chunkCount,
+ this.db.getChunkCount());
+
+ long secondAllocation = mallocChunks(1);
+
+ assertEquals("The freed chunk should be reused (there should be 3 chunks available)", chunkCount,
+ this.db.getChunkCount());
+
+ long thirdAllocation = mallocChunks(2);
+
+ assertEquals("The freed chunk should be reused (there should be exactly 2 chunks available)", chunkCount,
+ this.db.getChunkCount());
+ assertTrue(
+ "Allocations should happen from the start of the database if it makes no difference to fragmentation",
+ secondAllocation > firstAllocation);
+ assertTrue("Free space should have been kept next to the largest block for as long as possible",
+ secondAllocation > thirdAllocation);
+
+ // Do another allocation when there are no free chunks
+ mallocChunks(1);
+
+ assertEquals("New chunks should be allocated when the database is out of free blocks", chunkCount + 1,
+ this.db.getChunkCount());
+ }
+
+ /**
+ * Verifies that if a block is freed and the previous block is also free,
+ * the two free blocks will be combined into a single larger block.
+ */
+ public void testFreeBlockMergesWithPrevious() throws Exception {
+ long firstBlock = mallocChunks(1);
+ long secondBlock = mallocChunks(1);
+ mallocChunks(1);
+
+ free(firstBlock);
+ free(secondBlock);
+
+ int chunkCount = this.db.getChunkCount();
+
+ mallocChunks(2);
+ assertEquals("The merged block should have been used", chunkCount, this.db.getChunkCount());
+ }
+
+ /**
+ * Verifies that if a block is freed and the next block is also free, the
+ * two free blocks will be combined into a single larger block.
+ */
+ public void testFreeBlockMergesWithNext() throws Exception {
+ long firstBlock = mallocChunks(1);
+ long secondBlock = mallocChunks(1);
+ mallocChunks(1);
+
+ free(secondBlock);
+ free(firstBlock);
+
+ int chunkCount = this.db.getChunkCount();
+
+ mallocChunks(2);
+ assertEquals("The merged block should have been used", chunkCount, this.db.getChunkCount());
+ }
+
+ /**
+ * Verifies that if a block is freed and the blocks on both sides are also
+ * free, the three free blocks will be combined into a single larger block.
+ */
+ public void testFreeBlockMergesWithBothNextAndPrevious() throws Exception {
+ long firstBlock = mallocChunks(1);
+ long secondBlock = mallocChunks(1);
+ long thirdBlock = mallocChunks(1);
+ mallocChunks(1);
+
+ free(firstBlock);
+ free(thirdBlock);
+ free(secondBlock);
+
+ int chunkCount = this.db.getChunkCount();
+
+ mallocChunks(3);
+ assertEquals("The merged block should have been used", chunkCount, this.db.getChunkCount());
+ }
+
+ /**
+ * Tests removal of a chunk from the free space trie when there are
+ * duplicate free space nodes with the same size and the node being removed
+ * isn't the one with the embedded trie node.
+ */
+ public void testRemoveFreeSpaceNodeFromDuplicateList() throws Exception {
+ long chunk1 = mallocChunks(1);
+ mallocChunks(1);
+ long chunk3 = mallocChunks(1);
+ long chunk4 = mallocChunks(1);
+ mallocChunks(1);
+
+ int chunkCount = this.db.getChunkCount();
+
+ free(chunk1);
+ free(chunk3);
+ // At this point chunks 1 and 3 should be in the same linked list. Chunk
+ // 1 contains the embedded trie.
+
+ free(chunk4);
+ // Should merge with chunk3, causing it to be removed from the list
+
+ // Verify that we can allocate the merged chunk 3+4
+ mallocChunks(2);
+
+ assertEquals("Chunks 3 and 4 should have been merged", chunkCount, this.db.getChunkCount());
+ }
+
+ /**
+ * Tests removal of a chunk from the free space trie when the node being
+ * removed was part of the embedded trie and it has a non-empty list of
+ * other nodes of the same size.
+ */
+ public void testRemoveFreeSpaceNodeFromTrieWithDuplicates() throws Exception {
+ long chunk1 = mallocChunks(1);
+ mallocChunks(1);
+ long chunk3 = mallocChunks(1);
+ long chunk4 = mallocChunks(1);
+ mallocChunks(1);
+
+ int chunkCount = this.db.getChunkCount();
+
+ free(chunk3);
+ free(chunk1);
+ // At this point chunks 1 and 3 should be in the same linked list. Chunk
+ // 3 contains the embedded trie.
+
+ free(chunk4);
+ // Should merge with chunk3, causing it to be removed from the list
+
+ // Verify that we can allocate the merged chunk 3+4
+ mallocChunks(2);
+
+ assertEquals("Chunks 3 and 4 should have been merged", chunkCount, this.db.getChunkCount());
+ }
+
+ /**
+ * Tests reusing a chunk from the free space trie when it contains
+ * duplicates.
+ */
+ public void testReuseDeallocatedChunksWithMultipleFreeSpaceNodesOfTheSameSize() throws Exception {
+ long chunk1 = mallocChunks(2);
+ mallocChunks(1);
+ long chunk3 = mallocChunks(2);
+ mallocChunks(1);
+ long chunk5 = mallocChunks(2);
+ mallocChunks(1);
+
+ int chunkCount = this.db.getChunkCount();
+
+ free(chunk1);
+ free(chunk3);
+ free(chunk5);
+
+ mallocChunks(2);
+
+ assertEquals("A chunk should have been reused", chunkCount, this.db.getChunkCount());
+ }
+
+ /**
+ * Tests various corner cases in the trie map.
+ */
+ public void testTriesOfVariousSize() throws Exception {
+ long chunk1 = mallocChunks(1);
+ mallocChunks(1);
+ long chunk2 = mallocChunks(2);
+ mallocChunks(1);
+ long chunk3 = mallocChunks(3);
+ mallocChunks(1);
+ long chunk4 = mallocChunks(5);
+ mallocChunks(1);
+ long chunk5 = mallocChunks(6);
+ mallocChunks(1);
+ long chunk6 = mallocChunks(6);
+ mallocChunks(1);
+ long chunk7 = mallocChunks(10);
+ mallocChunks(1);
+ long chunk8 = mallocChunks(20);
+ mallocChunks(1);
+
+ int chunkCount = this.db.getChunkCount();
+
+ free(chunk7);
+ free(chunk4);
+ free(chunk1);
+ free(chunk3);
+ free(chunk8);
+ free(chunk5);
+ free(chunk2);
+ free(chunk6);
+
+ mallocChunks(4);
+ mallocChunks(10);
+
+ assertEquals("A chunk should have been reused", chunkCount, this.db.getChunkCount());
+ }
+
+ /**
+ * Tests that if there are multiple free blocks of different sizes and of
+ * exactly one of the requested size, that one is always selected.
+ */
+ public void testBestBlockIsAlwaysSelected() throws Exception {
+ int[] sizes = { 11, 2, 6, 1, 9, 10, 7, 8, 12, 20, 15, 3 };
+ long[] pointers = new long[sizes.length];
+
+ for (int idx = 0; idx < sizes.length; idx++) {
+ pointers[idx] = mallocChunks(sizes[idx]);
+ mallocChunks(1);
+ }
+
+ int chunkCount = this.db.getChunkCount();
+
+ for (int idx = 0; idx < pointers.length; idx++) {
+ free(pointers[idx]);
+ }
+
+ for (int idx = 0; idx < sizes.length; idx++) {
+ long nextPointer = mallocChunks(sizes[idx]);
+ assertEquals("Returned wrong pointer for malloc of " + sizes[idx] + " chunks", pointers[idx], nextPointer);
+ assertEquals("A chunk should have been reused", chunkCount, this.db.getChunkCount());
+ }
+ }
+}
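The new LargeBlockTest cases above exercise the Database's multi-chunk allocator: freed neighbours are merged into one larger free block, the best-fitting free block is preferred over growing the file, and oversized free blocks are split. As a conceptual aid only (the real implementation uses a free-space trie with embedded duplicate lists, not this structure), the merge-with-previous / merge-with-next behaviour can be sketched with an address-ordered map of free ranges:

import java.util.Map;
import java.util.TreeMap;

// Conceptual coalescing free list: start offset -> length. Not the Database code.
final class CoalescingFreeList {
	private final TreeMap<Long, Long> free = new TreeMap<>();

	void free(long start, long length) {
		Map.Entry<Long, Long> prev = this.free.floorEntry(start);
		if (prev != null && prev.getKey() + prev.getValue() == start) {
			start = prev.getKey();                    // merge with the free block just before us
			length += prev.getValue();
			this.free.remove(start);
		}
		Long nextLength = this.free.remove(start + length);
		if (nextLength != null) {
			length += nextLength;                     // merge with the free block just after us
		}
		this.free.put(start, length);
	}

	// Best-fit allocation; returns -1 when no free block is large enough.
	long allocate(long length) {
		long bestStart = -1, bestLength = Long.MAX_VALUE;
		for (Map.Entry<Long, Long> e : this.free.entrySet()) {
			if (e.getValue() >= length && e.getValue() < bestLength) {
				bestStart = e.getKey();
				bestLength = e.getValue();
			}
		}
		if (bestStart < 0) {
			return -1;                                // caller would grow the database instead
		}
		this.free.remove(bestStart);
		if (bestLength > length) {
			free(bestStart + length, bestLength - length); // return the unused tail
		}
		return bestStart;
	}
}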
diff --git a/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/RunIndexTests.java b/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/RunIndexTests.java
index a6c2bbc16..740636bef 100644
--- a/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/RunIndexTests.java
+++ b/org.eclipse.jdt.core.tests.model/src/org/eclipse/jdt/core/tests/nd/RunIndexTests.java
@@ -31,6 +31,7 @@ public static Class[] getAllTestClasses() {
FieldBackPointerTest.class,
IndexerTest.class,
InheritenceTests.class,
+ LargeBlockTest.class,
SearchKeyTests.class
};
}
diff --git a/org.eclipse.jdt.core/batch/org/eclipse/jdt/internal/compiler/batch/Main.java b/org.eclipse.jdt.core/batch/org/eclipse/jdt/internal/compiler/batch/Main.java
index 12a301cf1..8f6a0c06c 100644
--- a/org.eclipse.jdt.core/batch/org/eclipse/jdt/internal/compiler/batch/Main.java
+++ b/org.eclipse.jdt.core/batch/org/eclipse/jdt/internal/compiler/batch/Main.java
@@ -4801,7 +4801,7 @@ protected void setPaths(ArrayList bootclasspaths,
}
}
}
-private static boolean shouldIgnoreOptionalProblems(char[][] folderNames, char[] fileName) {
+protected final static boolean shouldIgnoreOptionalProblems(char[][] folderNames, char[] fileName) {
if (folderNames == null || fileName == null) {
return false;
}
diff --git a/org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/lookup/MethodBinding.java b/org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/lookup/MethodBinding.java
index 5267b7a50..e54a71778 100644
--- a/org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/lookup/MethodBinding.java
+++ b/org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/lookup/MethodBinding.java
@@ -945,12 +945,16 @@ protected void fillInDefaultNonNullness(AbstractMethodDeclaration sourceMethod)
//pre: null annotation analysis is enabled
protected void fillInDefaultNonNullness18(AbstractMethodDeclaration sourceMethod, LookupEnvironment env) {
+ MethodBinding original = original();
+ if(original == null) {
+ return;
+ }
if (hasNonNullDefaultFor(DefaultLocationParameter, true)) {
boolean added = false;
int length = this.parameters.length;
for (int i = 0; i < length; i++) {
TypeBinding parameter = this.parameters[i];
- if (!parameter.acceptsNonNullDefault())
+ if (!original.parameters[i].acceptsNonNullDefault())
continue;
long existing = parameter.tagBits & TagBits.AnnotationNullMASK;
if (existing == 0L) {
@@ -968,7 +972,7 @@ protected void fillInDefaultNonNullness18(AbstractMethodDeclaration sourceMethod
if (added)
this.tagBits |= TagBits.HasParameterAnnotations;
}
- if (this.returnType != null && hasNonNullDefaultFor(DefaultLocationReturnType, true) && this.returnType.acceptsNonNullDefault()) {
+ if (original.returnType != null && hasNonNullDefaultFor(DefaultLocationReturnType, true) && original.returnType.acceptsNonNullDefault()) {
if ((this.returnType.tagBits & TagBits.AnnotationNullMASK) == 0) {
this.returnType = env.createAnnotatedType(this.returnType, new AnnotationBinding[]{env.getNonNullAnnotation()});
} else if (sourceMethod instanceof MethodDeclaration && (this.returnType.tagBits & TagBits.AnnotationNonNull) != 0
diff --git a/org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/parser/AbstractCommentParser.java b/org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/parser/AbstractCommentParser.java
index f7efeb68a..5ec5abcca 100644
--- a/org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/parser/AbstractCommentParser.java
+++ b/org.eclipse.jdt.core/compiler/org/eclipse/jdt/internal/compiler/parser/AbstractCommentParser.java
@@ -1,5 +1,5 @@
/*******************************************************************************
- * Copyright (c) 2000, 2013 IBM Corporation and others.
+ * Copyright (c) 2000, 2016 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
@@ -176,7 +176,13 @@ public abstract class AbstractCommentParser implements JavadocTagConstants {
}
this.lineEnd = (this.linePtr == this.lastLinePtr) ? this.javadocEnd: this.scanner.getLineEnd(this.linePtr) - 1;
this.javadocTextEnd = this.javadocEnd - 2; // supposed text end, it will be refined later...
-
+ // https://bugs.eclipse.org/bugs/show_bug.cgi?id=206345
+ // when parsing tags such as @code and @literal,
+ // any tag should be discarded and considered as plain text until
+ // properly closed with closing brace
+ boolean considerTagAsPlainText = false;
+ // internal counter for opening braces
+ int openingBraces = 0;
// Loop on each comment character
int textEndPosition = -1;
while (!this.abort && this.index < this.javadocEnd) {
@@ -212,7 +218,20 @@ public abstract class AbstractCommentParser implements JavadocTagConstants {
switch (nextCharacter) {
case '@' :
// Start tag parsing only if we are on line beginning or at inline tag beginning
- if ((!this.lineStarted || previousChar == '{')) {
+ // https://bugs.eclipse.org/bugs/show_bug.cgi?id=206345: ignore all tags when inside @literal or @code tags
+ if (considerTagAsPlainText) {
+ // new tag found
+ if (!this.lineStarted) {
+ // we may want to report invalid syntax when no closing brace found,
+ // or when incoherent number of closing braces found
+ if (openingBraces > 0 && this.reportProblems) {
+ this.sourceParser.problemReporter().javadocUnterminatedInlineTag(this.inlineTagStart, invalidInlineTagLineEnd);
+ }
+ considerTagAsPlainText = false;
+ this.inlineTagStarted = false;
+ openingBraces = 0;
+ }
+ } else if ((!this.lineStarted || previousChar == '{')) {
if (this.inlineTagStarted) {
setInlineTagStarted(false);
// bug https://bugs.eclipse.org/bugs/show_bug.cgi?id=53279
@@ -256,6 +275,12 @@ public abstract class AbstractCommentParser implements JavadocTagConstants {
invalidTagLineEnd = this.lineEnd;
textEndPosition = this.index;
}
+ // https://bugs.eclipse.org/bugs/show_bug.cgi?id=206345
+ // dealing with @literal or @code tags: ignore next tags
+ if (!isFormatterParser && (this.tagValue == TAG_LITERAL_VALUE || this.tagValue == TAG_CODE_VALUE)) {
+ considerTagAsPlainText = true;
+ openingBraces++;
+ }
} catch (InvalidInputException e) {
consumeToken();
}
@@ -288,13 +313,24 @@ public abstract class AbstractCommentParser implements JavadocTagConstants {
if (verifText && this.tagValue == TAG_RETURN_VALUE && this.returnStatement != null) {
refreshReturnStatement();
}
+ // https://bugs.eclipse.org/bugs/show_bug.cgi?id=206345: when ignoring tags, only decrement the opening braces counter
+ if (considerTagAsPlainText) {
+ invalidInlineTagLineEnd = this.lineEnd;
+ if (--openingBraces == 0) {
+ considerTagAsPlainText = false; // re-enable tag validation
+ }
+ }
if (this.inlineTagStarted) {
textEndPosition = this.index - 1;
- if (this.lineStarted && this.textStart != -1 && this.textStart < textEndPosition) {
- pushText(this.textStart, textEndPosition);
+ // https://bugs.eclipse.org/bugs/show_bug.cgi?id=206345: do not push text yet if ignoring tags
+ if (!considerTagAsPlainText) {
+ if (this.lineStarted && this.textStart != -1 && this.textStart < textEndPosition) {
+ pushText(this.textStart, textEndPosition);
+ }
+ refreshInlineTagPosition(previousPosition);
}
- refreshInlineTagPosition(previousPosition);
- if (!isFormatterParser) this.textStart = this.index;
+ if (!isFormatterParser && !considerTagAsPlainText)
+ this.textStart = this.index;
setInlineTagStarted(false);
} else {
if (!this.lineStarted) {
@@ -308,7 +344,10 @@ public abstract class AbstractCommentParser implements JavadocTagConstants {
if (verifText && this.tagValue == TAG_RETURN_VALUE && this.returnStatement != null) {
refreshReturnStatement();
}
- if (this.inlineTagStarted) {
+ // https://bugs.eclipse.org/bugs/show_bug.cgi?id=206345: count opening braces when ignoring tags
+ if (considerTagAsPlainText) {
+ openingBraces++;
+ } else if (this.inlineTagStarted) {
setInlineTagStarted(false);
// bug https://bugs.eclipse.org/bugs/show_bug.cgi?id=53279
// Cannot have opening brace in inline comment
@@ -329,7 +368,8 @@ public abstract class AbstractCommentParser implements JavadocTagConstants {
this.textStart = previousPosition;
}
this.lineStarted = true;
- this.inlineTagStart = previousPosition;
+ // https://bugs.eclipse.org/bugs/show_bug.cgi?id=206345: do not update tag start position when ignoring tags
+ if (!considerTagAsPlainText) this.inlineTagStart = previousPosition;
break;
case '*' :
// Store the star position as text start while formatting
@@ -399,7 +439,8 @@ public abstract class AbstractCommentParser implements JavadocTagConstants {
// bug https://bugs.eclipse.org/bugs/show_bug.cgi?id=53279
// Cannot leave comment inside inline comment
- if (this.inlineTagStarted) {
+ // https://bugs.eclipse.org/bugs/show_bug.cgi?id=206345: handle unterminated @code or @literal tag
+ if (this.inlineTagStarted || considerTagAsPlainText) {
if (this.reportProblems) {
int end = this.javadocTextEnd<invalidInlineTagLineEnd ? this.javadocTextEnd : invalidInlineTagLineEnd;
if (this.index >= this.javadocEnd) end = invalidInlineTagLineEnd;
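The parser change above introduces a considerTagAsPlainText flag plus an openingBraces counter: once an {@code ...} or {@literal ...} tag starts, every character (including further '{@' sequences) is treated as plain text until the matching '}' is consumed, and a missing match is reported as an unterminated inline tag. The core of that bookkeeping, reduced to a hypothetical standalone helper over a plain comment string:

// Returns the index just past the '}' matching the '{' at openBrace, or -1 if
// the {@code ...} / {@literal ...} body is never closed (the case the parser
// now reports as "Missing closing brace for inline tag"). Illustration only.
static int skipInlineTagBody(String comment, int openBrace) {
	int depth = 0;
	for (int i = openBrace; i < comment.length(); i++) {
		char c = comment.charAt(i);
		if (c == '{') {
			depth++;                 // nested braces, e.g. a nested {@link ...}, stay plain text
		} else if (c == '}') {
			if (--depth == 0) {
				return i + 1;        // matching brace found; normal tag parsing resumes here
			}
		}
	}
	return -1;                       // unterminated @code/@literal
}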
diff --git a/org.eclipse.jdt.core/dom/org/eclipse/jdt/core/dom/DocCommentParser.java b/org.eclipse.jdt.core/dom/org/eclipse/jdt/core/dom/DocCommentParser.java
index 1136b3497..6bb53903a 100644
--- a/org.eclipse.jdt.core/dom/org/eclipse/jdt/core/dom/DocCommentParser.java
+++ b/org.eclipse.jdt.core/dom/org/eclipse/jdt/core/dom/DocCommentParser.java
@@ -1,5 +1,5 @@
/*******************************************************************************
- * Copyright (c) 2004, 2013 IBM Corporation and others.
+ * Copyright (c) 2004, 2016 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
@@ -427,6 +427,9 @@ class DocCommentParser extends AbstractCommentParser {
if (length == TAG_CATEGORY_LENGTH && CharOperation.equals(TAG_CATEGORY, tagName)) {
this.tagValue = TAG_CATEGORY_VALUE;
valid = parseIdentifierTag(false); // TODO (frederic) reconsider parameter value when @category will be significant in spec
+ } else if (length == TAG_CODE_LENGTH && CharOperation.equals(TAG_CODE, tagName)) {
+ this.tagValue = TAG_CODE_VALUE;
+ createTag();
} else {
this.tagValue = TAG_OTHERS_VALUE;
createTag();
@@ -490,8 +493,11 @@ class DocCommentParser extends AbstractCommentParser {
this.tagValue = TAG_LINK_VALUE;
} else if (length == TAG_LINKPLAIN_LENGTH && CharOperation.equals(TAG_LINKPLAIN, tagName)) {
this.tagValue = TAG_LINKPLAIN_VALUE;
+ } else if (length == TAG_LITERAL_LENGTH && CharOperation.equals(TAG_LITERAL, tagName)) {
+ this.tagValue = TAG_LITERAL_VALUE;
}
- if (this.tagValue != NO_TAG_VALUE) {
+
+ if (this.tagValue != NO_TAG_VALUE && this.tagValue != TAG_LITERAL_VALUE) {
if (this.inlineTagStarted) {
valid = parseReference();
} else {
@@ -500,7 +506,7 @@ class DocCommentParser extends AbstractCommentParser {
valid = false;
}
} else {
- this.tagValue = TAG_OTHERS_VALUE;
+ if (this.tagValue == NO_TAG_VALUE) this.tagValue = TAG_OTHERS_VALUE;
createTag();
}
break;
diff --git a/org.eclipse.jdt.core/model/org/eclipse/jdt/internal/core/JavaModelManager.java b/org.eclipse.jdt.core/model/org/eclipse/jdt/internal/core/JavaModelManager.java
index 8b4feaa1a..0985da93b 100644
--- a/org.eclipse.jdt.core/model/org/eclipse/jdt/internal/core/JavaModelManager.java
+++ b/org.eclipse.jdt.core/model/org/eclipse/jdt/internal/core/JavaModelManager.java
@@ -28,6 +28,7 @@ import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
+import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.StringReader;
@@ -179,7 +180,7 @@ public class JavaModelManager implements ISaveParticipant, IContentTypeChangeLis
private static final String ASSUMED_EXTERNAL_FILES_CACHE = "assumedExternalFilesCache"; //$NON-NLS-1$
public static enum ArchiveValidity {
- BAD_FORMAT, UNABLE_TO_READ, VALID;
+ BAD_FORMAT, UNABLE_TO_READ, FILE_NOT_FOUND, VALID;
public boolean isValid() {
return this == VALID;
@@ -2776,7 +2777,10 @@ public class JavaModelManager implements ISaveParticipant, IContentTypeChangeLis
* zip/jar, or it must be an absolute workspace relative path if
* representing a zip/jar inside the workspace.
*
- * @exception CoreException If unable to create/open the ZipFile
+ * @exception CoreException If unable to create/open the ZipFile. The
+ * cause will be a {@link ZipException} if the file was corrupt, a
+ * {@link FileNotFoundException} if the file does not exist, or a
+ * {@link IOException} if we were unable to read the file.
*/
public ZipFile getZipFile(IPath path) throws CoreException {
return getZipFile(path, true);
@@ -2791,7 +2795,7 @@ public class JavaModelManager implements ISaveParticipant, IContentTypeChangeLis
*/
public static boolean throwIoExceptionsInGetZipFile = false;
- private ZipFile getZipFile(IPath path, boolean checkInvalidArchiveCache) throws CoreException {
+ public ZipFile getZipFile(IPath path, boolean checkInvalidArchiveCache) throws CoreException {
if (checkInvalidArchiveCache) {
throwExceptionIfArchiveInvalid(path);
}
@@ -2816,7 +2820,15 @@ public class JavaModelManager implements ISaveParticipant, IContentTypeChangeLis
}
return zipFile;
} catch (IOException e) {
- ArchiveValidity reason = (e instanceof ZipException) ? ArchiveValidity.BAD_FORMAT : ArchiveValidity.UNABLE_TO_READ;
+ ArchiveValidity reason;
+
+ if (e instanceof ZipException) {
+ reason = ArchiveValidity.BAD_FORMAT;
+ } else if (e instanceof FileNotFoundException) {
+ reason = ArchiveValidity.FILE_NOT_FOUND;
+ } else {
+ reason = ArchiveValidity.UNABLE_TO_READ;
+ }
addInvalidArchive(path, reason);
throw new CoreException(new Status(IStatus.ERROR, JavaCore.PLUGIN_ID, -1, Messages.status_IOException, e));
}
@@ -2844,13 +2856,14 @@ public class JavaModelManager implements ISaveParticipant, IContentTypeChangeLis
private void throwExceptionIfArchiveInvalid(IPath path) throws CoreException {
ArchiveValidity validity = getArchiveValidity(path);
- if (!validity.isValid()) {
- IOException reason;
- if (validity == ArchiveValidity.BAD_FORMAT) {
- reason = new ZipException();
- } else {
- reason = new IOException();
- }
+ IOException reason;
+ switch (validity) {
+ case BAD_FORMAT: reason = new ZipException(); break;
+ case FILE_NOT_FOUND: reason = new FileNotFoundException(); break;
+ case UNABLE_TO_READ: reason = new IOException(); break;
+ default: reason = null;
+ }
+ if (reason != null) {
throw new CoreException(new Status(IStatus.ERROR, JavaCore.PLUGIN_ID, -1, Messages.status_IOException, reason));
}
}
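With the new FILE_NOT_FOUND validity and the updated javadoc on getZipFile(IPath), a caller can distinguish a missing archive from a corrupt or unreadable one by looking at the cause recorded in the thrown CoreException. A hedged sketch of such a caller (the wrapper class and method below are made up; only getZipFile and the documented causes come from the patch):

import java.io.FileNotFoundException;
import java.util.zip.ZipException;
import java.util.zip.ZipFile;

import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.jdt.internal.core.JavaModelManager;

class ArchiveAccessSketch {
	ZipFile openArchive(IPath path) throws CoreException {
		try {
			return JavaModelManager.getJavaModelManager().getZipFile(path);
		} catch (CoreException e) {
			Throwable cause = e.getStatus().getException();
			if (cause instanceof FileNotFoundException) {
				// archive does not exist        -> ArchiveValidity.FILE_NOT_FOUND
			} else if (cause instanceof ZipException) {
				// archive exists but is corrupt -> ArchiveValidity.BAD_FORMAT
			} else {
				// some other I/O failure        -> ArchiveValidity.UNABLE_TO_READ
			}
			throw e;
		}
	}
}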
diff --git a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/Nd.java b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/Nd.java
index 71e492f3d..f38304d22 100644
--- a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/Nd.java
+++ b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/Nd.java
@@ -169,8 +169,8 @@ public class Nd {
this.db.setLocked(lockDB);
if (!isSupportedVersion()) {
- Package.log("Index database is uses an unsupported version " + this.db.getVersion() //$NON-NLS-1$
- + " Deleting and recreating.", null); //$NON-NLS-1$
+ Package.logInfo("Index database uses the unsupported version " + this.db.getVersion() //$NON-NLS-1$
+ + ". Deleting and recreating."); //$NON-NLS-1$
this.db.close();
this.fPath.delete();
this.db = new Database(this.fPath, cache, getDefaultVersion(), isPermanentlyReadOnly());
diff --git a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/RawGrowableArray.java b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/RawGrowableArray.java
index df7ca4ee9..2bd371c05 100644
--- a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/RawGrowableArray.java
+++ b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/RawGrowableArray.java
@@ -126,7 +126,7 @@ public final class RawGrowableArray {
GROWABLE_BLOCK_HEADER_BYTES = type.size();
- MAX_GROWABLE_SIZE = (Database.MAX_MALLOC_SIZE - GROWABLE_BLOCK_HEADER_BYTES)
+ MAX_GROWABLE_SIZE = (Database.MAX_SINGLE_BLOCK_MALLOC_SIZE - GROWABLE_BLOCK_HEADER_BYTES)
/ Database.PTR_SIZE;
}
}
diff --git a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/Database.java b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/Database.java
index 02060fe08..8bb0ed081 100644
--- a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/Database.java
+++ b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/Database.java
@@ -46,8 +46,9 @@ import org.eclipse.osgi.util.NLS;
* INT_SIZE | pointer to head of linked list of blocks of size MIN_BLOCK_DELTAS*BLOCK_SIZE_DELTA
* .. | ...
* INT_SIZE * (M + 1) | pointer to head of linked list of blocks of size (M + MIN_BLOCK_DELTAS) * BLOCK_SIZE_DELTA
+ * FREE_BLOCK_OFFSET | chunk number for the root of the large block free space trie
* WRITE_NUMBER_OFFSET | long integer which is incremented on every write
- * MALLOC_STATS_OFFSET | memory usage statistics
+ * MALLOC_STATS_OFFSET | memory usage statistics
* DATA_AREA | The database singletons are stored here and use the remainder of chunk 0
*
* M = CHUNK_SIZE / BLOCK_SIZE_DELTA - MIN_BLOCK_DELTAS
@@ -86,21 +87,26 @@ public class Database {
private static final int BLOCK_PREV_OFFSET = BLOCK_HEADER_SIZE;
private static final int BLOCK_NEXT_OFFSET = BLOCK_HEADER_SIZE + INT_SIZE;
private static final int FREE_BLOCK_HEADER_SIZE = BLOCK_NEXT_OFFSET + INT_SIZE;
-
- public static final int MIN_BLOCK_DELTAS = (FREE_BLOCK_HEADER_SIZE + BLOCK_SIZE_DELTA - 1) /
- BLOCK_SIZE_DELTA; // Must be enough multiples of BLOCK_SIZE_DELTA in order to fit the free block header
- public static final int MAX_BLOCK_DELTAS = CHUNK_SIZE / BLOCK_SIZE_DELTA;
- public static final int MAX_MALLOC_SIZE = MAX_BLOCK_DELTAS * BLOCK_SIZE_DELTA - BLOCK_HEADER_SIZE;
- public static final int PTR_SIZE = 4; // size of a pointer in the database in bytes
+
+ // Must be enough multiples of BLOCK_SIZE_DELTA in order to fit the free block header
+ public static final int MIN_BLOCK_DELTAS = (FREE_BLOCK_HEADER_SIZE + BLOCK_SIZE_DELTA - 1) / BLOCK_SIZE_DELTA;
+ public static final int MAX_BLOCK_DELTAS = (CHUNK_SIZE - LargeBlock.HEADER_SIZE - LargeBlock.FOOTER_SIZE)
+ / BLOCK_SIZE_DELTA;
+ public static final int MAX_SINGLE_BLOCK_MALLOC_SIZE = MAX_BLOCK_DELTAS * BLOCK_SIZE_DELTA - BLOCK_HEADER_SIZE;
+ public static final int PTR_SIZE = 4; // size of a pointer in the database in bytes
public static final int STRING_SIZE = PTR_SIZE;
public static final int FLOAT_SIZE = INT_SIZE;
public static final int DOUBLE_SIZE = LONG_SIZE;
public static final long MAX_DB_SIZE= ((long) 1 << (Integer.SIZE + BLOCK_SIZE_DELTA_BITS));
+ public static final long MAX_MALLOC_SIZE = MAX_DB_SIZE - LargeBlock.HEADER_SIZE - LargeBlock.FOOTER_SIZE
+ - CHUNK_SIZE - BLOCK_HEADER_SIZE;
+
public static final int VERSION_OFFSET = 0;
- private static final int MALLOC_TABLE_OFFSET = VERSION_OFFSET + INT_SIZE;
- public static final int WRITE_NUMBER_OFFSET = MALLOC_TABLE_OFFSET
+ public static final int MALLOC_TABLE_OFFSET = VERSION_OFFSET + INT_SIZE;
+ public static final int FREE_BLOCK_OFFSET = MALLOC_TABLE_OFFSET
+ (CHUNK_SIZE / BLOCK_SIZE_DELTA - MIN_BLOCK_DELTAS + 1) * INT_SIZE;
+ public static final int WRITE_NUMBER_OFFSET = FREE_BLOCK_OFFSET + PTR_SIZE;
public static final int MALLOC_STATS_OFFSET = WRITE_NUMBER_OFFSET + LONG_SIZE;
public static final int DATA_AREA_OFFSET = MALLOC_STATS_OFFSET + MemoryStats.SIZE;
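
The new FREE_BLOCK_OFFSET slot is inserted between the small-block malloc table and the write counter, so every later offset shifts by one pointer. A standalone arithmetic sketch of these formulas; the numeric values below (chunk size, delta size, field widths) are illustrative assumptions, not the real constants:

// Illustrative header-offset arithmetic; every numeric input below is an assumption for the example only.
public class HeaderLayoutSketch {
    public static void main(String[] args) {
        int INT_SIZE = 4, PTR_SIZE = 4, LONG_SIZE = 8;  // assumed field widths
        int CHUNK_SIZE = 4096, BLOCK_SIZE_DELTA = 8;    // assumed chunk geometry
        int MIN_BLOCK_DELTAS = 2;                       // assumed minimum small-block size, in deltas

        int VERSION_OFFSET = 0;
        int MALLOC_TABLE_OFFSET = VERSION_OFFSET + INT_SIZE;
        // One free-list head per small-block size, exactly as in the formula above.
        int mallocTableEntries = CHUNK_SIZE / BLOCK_SIZE_DELTA - MIN_BLOCK_DELTAS + 1;
        int FREE_BLOCK_OFFSET = MALLOC_TABLE_OFFSET + mallocTableEntries * INT_SIZE;
        int WRITE_NUMBER_OFFSET = FREE_BLOCK_OFFSET + PTR_SIZE;
        int MALLOC_STATS_OFFSET = WRITE_NUMBER_OFFSET + LONG_SIZE;

        System.out.println("malloc table entries: " + mallocTableEntries);  // 511
        System.out.println("FREE_BLOCK_OFFSET:    " + FREE_BLOCK_OFFSET);   // 2048
        System.out.println("WRITE_NUMBER_OFFSET:  " + WRITE_NUMBER_OFFSET); // 2052
        System.out.println("MALLOC_STATS_OFFSET:  " + MALLOC_STATS_OFFSET); // 2060
    }
}
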
@@ -128,7 +134,6 @@ public class Database {
private final Chunk fHeaderChunk;
private Chunk[] fChunks;
private int fChunksUsed;
- private int fChunksAllocated;
private ChunkCache fCache;
private long malloced;
@@ -159,12 +164,12 @@ public class Database {
if (nChunksOnDisk <= 0) {
this.fVersion= version;
this.fChunks= new Chunk[1];
- this.fChunksUsed = this.fChunksAllocated = this.fChunks.length;
+ this.fChunksUsed = this.fChunks.length;
} else {
this.fHeaderChunk.read();
this.fVersion= this.fHeaderChunk.getInt(VERSION_OFFSET);
this.fChunks = new Chunk[nChunksOnDisk]; // chunk[0] is unused.
- this.fChunksUsed = this.fChunksAllocated = nChunksOnDisk;
+ this.fChunksUsed = nChunksOnDisk;
}
} catch (IOException e) {
throw new IndexException(new DBStatus(e));
@@ -172,8 +177,8 @@ public class Database {
this.memoryUsage = new MemoryStats(this.fHeaderChunk, MALLOC_STATS_OFFSET);
}
- private static int divideRoundingUp(int num, int den) {
- return (num + den - 1) / den;
+ private static int divideRoundingUp(long num, int den) {
+ return (int) ((num + den - 1) / den);
}
private void openFile() throws FileNotFoundException {
@@ -292,7 +297,7 @@ public class Database {
this.fHeaderChunk.clear(0, CHUNK_SIZE);
// Chunks have been removed from the cache, so we may just reset the array of chunks.
this.fChunks = new Chunk[] {null};
- this.fChunksUsed = this.fChunksAllocated = this.fChunks.length;
+ this.fChunksUsed = this.fChunks.length;
try {
wasCanceled = this.fHeaderChunk.flush() || wasCanceled; // Zero out header chunk.
wasCanceled = performUninterruptableWrite(() -> {
@@ -381,7 +386,7 @@ public class Database {
*/
public void memcpy(long dest, long source, int numBytes) {
assert numBytes >= 0;
- assert numBytes <= MAX_MALLOC_SIZE;
+ assert numBytes <= MAX_SINGLE_BLOCK_MALLOC_SIZE;
// TODO: make use of lower-level System.arrayCopy
for (int count = 0; count < numBytes; count++) {
putByte(dest + count, getByte(source + count));
@@ -391,78 +396,442 @@ public class Database {
/**
* Allocate a block out of the database.
*/
- public long malloc(final int datasize, final short poolId) throws IndexException {
+ public long malloc(final long datasize, final short poolId) throws IndexException {
assert this.fExclusiveLock;
assert datasize >= 0;
assert datasize <= MAX_MALLOC_SIZE;
- int needDeltas= divideRoundingUp(datasize + BLOCK_HEADER_SIZE, BLOCK_SIZE_DELTA);
- if (needDeltas < MIN_BLOCK_DELTAS) {
- needDeltas= MIN_BLOCK_DELTAS;
+ long result;
+ int usedSize;
+ if (datasize >= MAX_SINGLE_BLOCK_MALLOC_SIZE) {
+ int newChunkNum = createLargeBlock(datasize);
+ usedSize = Math.abs(getBlockHeaderForChunkNum(newChunkNum)) * CHUNK_SIZE;
+ result = newChunkNum * CHUNK_SIZE + LargeBlock.HEADER_SIZE;
+ // Note that we identify large blocks by setting their block size to 0.
+ clearRange(result, usedSize - LargeBlock.HEADER_SIZE - LargeBlock.FOOTER_SIZE);
+ result = result + BLOCK_HEADER_SIZE;
+ } else {
+ long freeBlock = 0;
+ int needDeltas = divideRoundingUp(datasize + BLOCK_HEADER_SIZE, BLOCK_SIZE_DELTA);
+ if (needDeltas < MIN_BLOCK_DELTAS) {
+ needDeltas = MIN_BLOCK_DELTAS;
+ }
+
+ // Which block size.
+ int useDeltas;
+ for (useDeltas = needDeltas; useDeltas <= MAX_BLOCK_DELTAS; useDeltas++) {
+ freeBlock = getFirstBlock(useDeltas * BLOCK_SIZE_DELTA);
+ if (freeBlock != 0)
+ break;
+ }
+
+ // Get the block.
+ Chunk chunk;
+ if (freeBlock == 0) {
+ // Allocate a new chunk.
+ freeBlock = (long) (createLargeBlock(datasize)) * (long) CHUNK_SIZE + LargeBlock.HEADER_SIZE;
+ useDeltas = MAX_BLOCK_DELTAS;
+ chunk = getChunk(freeBlock);
+ } else {
+ chunk = getChunk(freeBlock);
+ removeBlock(chunk, useDeltas * BLOCK_SIZE_DELTA, freeBlock);
+ }
+
+ final int unusedDeltas = useDeltas - needDeltas;
+ if (unusedDeltas >= MIN_BLOCK_DELTAS) {
+ // Add in the unused part of our block.
+ addBlock(chunk, unusedDeltas * BLOCK_SIZE_DELTA, freeBlock + needDeltas * BLOCK_SIZE_DELTA);
+ useDeltas = needDeltas;
+ }
+
+ // Make our size negative to show in use.
+ usedSize = useDeltas * BLOCK_SIZE_DELTA;
+ chunk.putShort(freeBlock, (short) -usedSize);
+
+ // Clear out the block, lots of people are expecting this.
+ chunk.clear(freeBlock + BLOCK_HEADER_SIZE, usedSize - BLOCK_HEADER_SIZE);
+ result = freeBlock + BLOCK_HEADER_SIZE;
}
- // Which block size.
- long freeblock = 0;
- int useDeltas;
- for (useDeltas= needDeltas; useDeltas <= MAX_BLOCK_DELTAS; useDeltas++) {
- freeblock = getFirstBlock(useDeltas * BLOCK_SIZE_DELTA);
- if (freeblock != 0)
- break;
+ this.malloced += usedSize;
+ this.memoryUsage.recordMalloc(poolId, usedSize);
+ return result;
+ }
+
+ /**
+ * Clears all the bytes in the given range by setting them to zero.
+ *
+ * @param startAddress first address to clear
+ * @param bytesToClear number of bytes to clear
+ */
+ public void clearRange(long startAddress, int bytesToClear) {
+ if (bytesToClear == 0) {
+ return;
+ }
+ long endAddress = startAddress + bytesToClear;
+ assert endAddress <= this.fChunksUsed * CHUNK_SIZE;
+ int blockNumber = (int) (startAddress / CHUNK_SIZE);
+ int firstBlockBytesToClear = Math.min((int) (((blockNumber + 1) * CHUNK_SIZE) - startAddress), bytesToClear);
+
+ Chunk firstBlock = getChunk(startAddress);
+ firstBlock.clear(startAddress, firstBlockBytesToClear);
+ startAddress += firstBlockBytesToClear;
+ bytesToClear -= firstBlockBytesToClear;
+ while (bytesToClear > CHUNK_SIZE) {
+ Chunk nextBlock = getChunk(startAddress);
+ nextBlock.clear(startAddress, CHUNK_SIZE);
+ startAddress += CHUNK_SIZE;
+ bytesToClear -= CHUNK_SIZE;
+ }
+
+ if (bytesToClear > 0) {
+ Chunk nextBlock = getChunk(startAddress);
+ nextBlock.clear(startAddress, bytesToClear);
}
+ }
- // Get the block.
- Chunk chunk;
- if (freeblock == 0) {
- // Allocate a new chunk.
- freeblock= createNewChunk();
- useDeltas = MAX_BLOCK_DELTAS;
- chunk = getChunk(freeblock);
+ /**
+ * Obtains a new block that can fit the given number of bytes (at minimum). Returns the
+ * chunk number.
+ *
+ * @param datasize minimum number of bytes needed
+ * @return the chunk number
+ */
+ private int createLargeBlock(long datasize) {
+ final int neededChunks = getChunksNeededForBytes(datasize);
+ int freeBlockChunkNum = getFreeBlockFromTrie(neededChunks);
+ final int numChunks;
+
+ if (freeBlockChunkNum == 0) {
+ final int lastChunkNum = this.fChunksUsed;
+
+ numChunks = neededChunks;
+
+ // Check if the last block in the database is free. If so, unlink and expand it.
+ int lastBlockSize = getBlockFooterForChunkBefore(lastChunkNum);
+ if (lastBlockSize > 0) {
+ int startChunkNum = getFirstChunkOfBlockBefore(lastChunkNum);
+
+ unlinkFreeBlock(startChunkNum);
+ // Allocate additional new chunks such that the new chunk is large enough to
+ // handle this allocation.
+ createNewChunks(neededChunks - lastBlockSize);
+ freeBlockChunkNum = startChunkNum;
+ } else {
+ freeBlockChunkNum = createNewChunks(numChunks);
+ }
} else {
- chunk = getChunk(freeblock);
- removeBlock(chunk, useDeltas * BLOCK_SIZE_DELTA, freeblock);
+ numChunks = getBlockHeaderForChunkNum(freeBlockChunkNum);
+
+ unlinkFreeBlock(freeBlockChunkNum);
}
- final int unusedDeltas = useDeltas - needDeltas;
- if (unusedDeltas >= MIN_BLOCK_DELTAS) {
- // Add in the unused part of our block.
- addBlock(chunk, unusedDeltas * BLOCK_SIZE_DELTA, freeblock + needDeltas * BLOCK_SIZE_DELTA);
- useDeltas= needDeltas;
+ final int resultChunkNum;
+ if (numChunks > neededChunks) {
+ // If the chunk we've selected is larger than necessary, split it. We have the
+ // choice of using either half of the block. In the interest of leaving more
+ // opportunities of merging large blocks, we leave the unused half of the block
+ // next to the larger adjacent block.
+ final long nextBlockChunkNum = freeBlockChunkNum + numChunks;
+
+ final int nextBlockSize = Math.abs(getBlockHeaderForChunkNum(nextBlockChunkNum));
+ final int prevBlockSize = Math.abs(getBlockFooterForChunkBefore(freeBlockChunkNum));
+
+ final int unusedChunks = numChunks - neededChunks;
+ if (nextBlockSize >= prevBlockSize) {
+ // Use the start of the block
+ resultChunkNum = freeBlockChunkNum;
+ // Return the last half of the block to the free block pool
+ linkFreeBlockToTrie(freeBlockChunkNum + neededChunks, unusedChunks);
+ } else {
+ // Use the end of the block
+ resultChunkNum = freeBlockChunkNum + neededChunks;
+ // Return the first half of the block to the free block pool
+ linkFreeBlockToTrie(freeBlockChunkNum, unusedChunks);
+ }
+ } else {
+ resultChunkNum = freeBlockChunkNum;
}
- // Make our size negative to show in use.
- final int usedSize= useDeltas * BLOCK_SIZE_DELTA;
- chunk.putShort(freeblock, (short) -usedSize);
+ // Fill in the header and footer
+ setBlockHeader(resultChunkNum, -neededChunks);
+ return resultChunkNum;
+ }
- // Clear out the block, lots of people are expecting this.
- chunk.clear(freeblock + BLOCK_HEADER_SIZE, usedSize - BLOCK_HEADER_SIZE);
+ /**
+ * Unlinks a free block (which currently belongs to the free block trie) so that it may
+ * be reused.
+ *
+ * @param freeBlockChunkNum chunk number of the block to be unlinked
+ */
+ private void unlinkFreeBlock(int freeBlockChunkNum) {
+ long freeBlockAddress = freeBlockChunkNum * CHUNK_SIZE;
+ int anotherBlockOfSameSize = 0;
+ int nextBlockChunkNum = getInt(freeBlockAddress + LargeBlock.NEXT_BLOCK_OFFSET);
+ int prevBlockChunkNum = getInt(freeBlockAddress + LargeBlock.PREV_BLOCK_OFFSET);
+ // Relink the linked list
+ if (nextBlockChunkNum != 0) {
+ anotherBlockOfSameSize = nextBlockChunkNum;
+ putInt(nextBlockChunkNum * CHUNK_SIZE + LargeBlock.PREV_BLOCK_OFFSET, prevBlockChunkNum);
+ }
+ if (prevBlockChunkNum != 0) {
+ anotherBlockOfSameSize = prevBlockChunkNum;
+ putInt(prevBlockChunkNum * CHUNK_SIZE + LargeBlock.NEXT_BLOCK_OFFSET, nextBlockChunkNum);
+ }
+
+ long root = getInt(FREE_BLOCK_OFFSET);
+ if (root == freeBlockChunkNum) {
+ putInt(FREE_BLOCK_OFFSET, 0);
+ }
+
+ int freeBlockSize = getBlockHeaderForChunkNum(freeBlockChunkNum);
+ int parentChunkNum = getInt(freeBlockAddress + LargeBlock.PARENT_OFFSET);
+ if (parentChunkNum != 0) {
+ int currentSize = getBlockHeaderForChunkNum(parentChunkNum);
+ int difference = currentSize ^ freeBlockSize;
+ if (difference != 0) {
+ int firstDifference = LargeBlock.SIZE_OF_SIZE_FIELD * 8 - Integer.numberOfLeadingZeros(difference) - 1;
+ long locationOfChildPointer = parentChunkNum * CHUNK_SIZE + LargeBlock.CHILD_TABLE_OFFSET
+ + (firstDifference * INT_SIZE);
+ putInt(locationOfChildPointer, 0);
+ }
+ }
+
+ if (anotherBlockOfSameSize != 0) {
+ insertChild(parentChunkNum, anotherBlockOfSameSize);
+ }
+
+ int currentParent = parentChunkNum;
+ for (int childIdx = 0; childIdx < LargeBlock.ENTRIES_IN_CHILD_TABLE; childIdx++) {
+ int nextChildChunkNum = getInt(freeBlockAddress + LargeBlock.CHILD_TABLE_OFFSET + (childIdx * INT_SIZE));
+ if (nextChildChunkNum != 0) {
+ insertChild(currentParent, nextChildChunkNum);
+ // Parent all subsequent children under the child that was most similar to the old parent
+ if (currentParent == parentChunkNum) {
+ currentParent = nextChildChunkNum;
+ }
+ }
+ }
- this.malloced += usedSize;
- long result = freeblock + BLOCK_HEADER_SIZE;
- this.memoryUsage.recordMalloc(poolId, usedSize);
- return result;
}
- private long createNewChunk() throws IndexException {
- assert this.fExclusiveLock;
- synchronized (this.fCache) {
- final int newChunkIndex = this.fChunksUsed; // fChunks.length;
+ /**
+ * Returns the chunk number of a free block that contains at least the given number of chunks, or
+ * 0 if there is no existing contiguous free block containing at least the given number of chunks.
+ *
+ * @param numChunks minimum number of chunks desired
+ * @return the chunk number of a free block containing at least the given number of chunks or 0
+ * if there is no existing free block containing that many chunks.
+ */
+ private int getFreeBlockFromTrie(int numChunks) {
+ int currentChunkNum = getInt(FREE_BLOCK_OFFSET);
- final Chunk chunk = new Chunk(this, newChunkIndex);
- chunk.fDirty = true;
+ int resultChunkNum = getSmallestChildNoSmallerThan(currentChunkNum, numChunks);
+ if (resultChunkNum == 0) {
+ return 0;
+ }
- if (newChunkIndex >= this.fChunksAllocated) {
- int increment = Math.max(1024, this.fChunksAllocated / 20);
- Chunk[] newchunks = new Chunk[this.fChunksAllocated + increment];
- System.arraycopy(this.fChunks, 0, newchunks, 0, this.fChunksAllocated);
+ // Try not to return the trie node itself if there is a linked list entry available, since unlinking
+ // something from the linked list is faster than unlinking a trie node.
+ int nextResultChunkNum = getInt(resultChunkNum * CHUNK_SIZE + LargeBlock.NEXT_BLOCK_OFFSET);
+ if (nextResultChunkNum != 0) {
+ return nextResultChunkNum;
+ }
+ return resultChunkNum;
+ }
- this.fChunks = newchunks;
- this.fChunksAllocated += increment;
+ /**
+ * Given the chunk number of a block somewhere in the free space trie, this returns the smallest
+ * child in the subtree that is no smaller than the given number of chunks.
+ *
+ * @param trieNodeChunkNum chunk number of a block in the free space trie
+ * @param numChunks desired number of chunks
+ * @return the chunk number of the first chunk in a contiguous free block containing at least the
+ * given number of chunks
+ */
+ private int getSmallestChildNoSmallerThan(int trieNodeChunkNum, int numChunks) {
+ if (trieNodeChunkNum == 0) {
+ return 0;
+ }
+ int currentSize = getBlockHeaderForChunkNum(trieNodeChunkNum);
+ assert (currentSize >= 0);
+ int difference = currentSize ^ numChunks;
+ if (difference == 0) {
+ return trieNodeChunkNum;
+ }
+
+ int bitMask = Integer.highestOneBit(difference);
+ int firstDifference = LargeBlock.SIZE_OF_SIZE_FIELD * 8 - Integer.numberOfLeadingZeros(bitMask) - 1;
+ boolean lookingForSmallerChild = (currentSize > numChunks);
+ for (int testPosition = firstDifference; testPosition < LargeBlock.ENTRIES_IN_CHILD_TABLE; testPosition++) {
+ if (((currentSize & bitMask) != 0) == lookingForSmallerChild) {
+ int nextChildChunkNum = getInt(
+ trieNodeChunkNum * CHUNK_SIZE + LargeBlock.CHILD_TABLE_OFFSET + (testPosition * PTR_SIZE));
+ int childResultChunkNum = getSmallestChildNoSmallerThan(nextChildChunkNum, numChunks);
+ if (childResultChunkNum != 0) {
+ return childResultChunkNum;
+ }
}
- this.fChunksUsed += 1;
- this.fChunks[newChunkIndex] = chunk;
+ bitMask <<= 1;
+ }
- this.fCache.add(chunk, true);
- long address = (long) newChunkIndex * CHUNK_SIZE;
+ if (lookingForSmallerChild) {
+ return trieNodeChunkNum;
+ } else {
+ return 0;
+ }
+ }
+
+ /**
+ * Link the given unused block into the free block trie. The block does not need to have
+ * its header filled in already.
+ *
+ * @param freeBlockChunkNum chunk number of the start of the block
+ * @param numChunks number of chunks in the block
+ */
+ private void linkFreeBlockToTrie(int freeBlockChunkNum, int numChunks) {
+ setBlockHeader(freeBlockChunkNum, numChunks);
+ long freeBlockAddress = freeBlockChunkNum * CHUNK_SIZE;
+ Chunk chunk = getChunk(freeBlockAddress);
+ chunk.clear(freeBlockAddress + LargeBlock.HEADER_SIZE,
+ LargeBlock.UNALLOCATED_HEADER_SIZE - LargeBlock.HEADER_SIZE);
+
+ insertChild(getInt(FREE_BLOCK_OFFSET), freeBlockChunkNum);
+ }
+
+ /**
+ * Adds the given child block to the given parent subtree of the free space trie. Any existing
+ * subtree under the given child block will be retained.
+ *
+ * @param parentChunkNum root of the existing tree, or 0 if the child is going to be the new root
+ * @param newChildChunkNum the new child to insert
+ */
+ private void insertChild(int parentChunkNum, int newChildChunkNum) {
+ if (parentChunkNum == 0) {
+ putInt(newChildChunkNum * CHUNK_SIZE + LargeBlock.PARENT_OFFSET, parentChunkNum);
+ putInt(FREE_BLOCK_OFFSET, newChildChunkNum);
+ return;
+ }
+ int numChunks = getBlockHeaderForChunkNum(newChildChunkNum);
+ for (;;) {
+ int currentSize = getBlockHeaderForChunkNum(parentChunkNum);
+ int difference = currentSize ^ numChunks;
+ if (difference == 0) {
+ // The newly added item is exactly the same size as this trie node
+ insertFreeBlockAfter(parentChunkNum, newChildChunkNum);
+ return;
+ }
+
+ int firstDifference = LargeBlock.SIZE_OF_SIZE_FIELD * 8 - Integer.numberOfLeadingZeros(difference) - 1;
+ long locationOfChildPointer = parentChunkNum * CHUNK_SIZE + LargeBlock.CHILD_TABLE_OFFSET
+ + (firstDifference * INT_SIZE);
+ int childChunkNum = getInt(locationOfChildPointer);
+ if (childChunkNum == 0) {
+ putInt(locationOfChildPointer, newChildChunkNum);
+ putInt(newChildChunkNum * CHUNK_SIZE + LargeBlock.PARENT_OFFSET, parentChunkNum);
+ return;
+ }
+ parentChunkNum = childChunkNum;
+ }
+ }
+
+ /**
+ * Adds the given block to the linked list of equally-sized free chunks in the free space trie.
+ * Both chunks must be unused, must be the same size, and the previous chunk must already
+ * be linked into the free space trie. The newly-added chunk must not have any children.
+ *
+ * @param prevChunkNum chunk number of previous block in the existing list
+ * @param newChunkNum new chunk to be added to the list
+ */
+ private void insertFreeBlockAfter(int prevChunkNum, int newChunkNum) {
+ long prevChunkAddress = (long) prevChunkNum * CHUNK_SIZE;
+ int nextChunkNum = getInt(prevChunkAddress + LargeBlock.NEXT_BLOCK_OFFSET);
+ long nextChunkAddress = (long) nextChunkNum * CHUNK_SIZE;
+ long newLockAddress = (long) newChunkNum * CHUNK_SIZE;
+
+ putInt(prevChunkAddress + LargeBlock.NEXT_BLOCK_OFFSET, newChunkNum);
+ if (nextChunkNum != 0) {
+ putInt(nextChunkAddress + LargeBlock.PREV_BLOCK_OFFSET, newChunkNum);
+ }
+ putInt(newLockAddress + LargeBlock.PREV_BLOCK_OFFSET, prevChunkNum);
+ putInt(newLockAddress + LargeBlock.NEXT_BLOCK_OFFSET, nextChunkNum);
+ }
+
+ /**
+ * Returns the chunk number of the chunk at the start of a block, given the
+ * chunk number of the chunk at the start of the following block.
+ *
+ * @param chunkNum the chunk number of the chunk immediately following the
+ * chunk being queried
+ * @return the chunk number of the chunk at the start of the previous block
+ */
+ private int getFirstChunkOfBlockBefore(int chunkNum) {
+ int blockChunks = Math.abs(getBlockFooterForChunkBefore(chunkNum));
+ return chunkNum - blockChunks;
+ }
+
+ /**
+ * Sets the block header and footer for the given range of chunks which make
+ * up a contiguous block.
+ *
+ * @param firstChunkNum chunk number of the first chunk in the block
+ * @param headerContent the content of the header. Its magnitude is the number of
+ * chunks in the block. It is positive if the block is free and negative if
+ * the block is in use.
+ */
+ private void setBlockHeader(int firstChunkNum, int headerContent) {
+ assert headerContent != 0;
+ assert firstChunkNum < this.fChunksUsed;
+ int numBlocks = Math.abs(headerContent);
+ long firstChunkAddress = firstChunkNum * CHUNK_SIZE;
+ putInt(firstChunkAddress, headerContent);
+ putInt(firstChunkAddress + (numBlocks * CHUNK_SIZE) - LargeBlock.FOOTER_SIZE, headerContent);
+ }
+
+ /**
+ * Returns the size of the block (in number of chunks) starting at the given address. The return value is positive
+ * if the block is free and negative if the block is allocated.
+ */
+ private int getBlockHeaderForChunkNum(long firstChunkNum) {
+ if (firstChunkNum >= this.fChunksUsed) {
+ return 0;
+ }
+ return getInt(firstChunkNum * CHUNK_SIZE);
+ }
+
+ /**
+ * Returns the size of the block (in number of chunks), given the (non-inclusive) address that the block ends at.
+ * The return value is positive if the block is free and negative if the block is allocated.
+ */
+ private int getBlockFooterForChunkBefore(int chunkNum) {
+ if (chunkNum < 2) {
+ // Don't report the database header as a normal chunk.
+ return 0;
+ }
+ return getInt(chunkNum * CHUNK_SIZE - LargeBlock.FOOTER_SIZE);
+ }
+
+ private int createNewChunks(int numChunks) throws IndexException {
+ assert this.fExclusiveLock;
+ synchronized (this.fCache) {
+ final int firstChunkIndex = this.fChunksUsed;
+ final int lastChunkIndex = firstChunkIndex + numChunks - 1;
+
+ final Chunk lastChunk = new Chunk(this, lastChunkIndex);
+ lastChunk.fDirty = true;
+
+ if (lastChunkIndex >= this.fChunks.length) {
+ int increment = Math.max(1024, this.fChunks.length / 20);
+ int newNumChunks = Math.max(lastChunkIndex + 1, this.fChunks.length + increment);
+ Chunk[] newChunks = new Chunk[newNumChunks];
+ System.arraycopy(this.fChunks, 0, newChunks, 0, this.fChunks.length);
+ this.fChunks = newChunks;
+ }
+
+ this.fChunksUsed = lastChunkIndex + 1;
+ this.fChunks[lastChunkIndex] = lastChunk;
+ this.fCache.add(lastChunk, true);
+ long result = (long) firstChunkIndex * CHUNK_SIZE;
/*
* Non-dense pointers are at most 31 bits dense pointers are at most 35 bits Check the sizes here and throw
@@ -470,33 +839,15 @@ public class Database {
* indexing operation should be stopped. This is desired since generally, once the max size is exceeded,
* there are lots of errors.
*/
- if (address >= MAX_DB_SIZE) {
+ long endAddress = result + (numChunks * CHUNK_SIZE);
+ if (endAddress > MAX_DB_SIZE) {
Object bindings[] = { this.getLocation().getAbsolutePath(), MAX_DB_SIZE };
throw new IndexException(new Status(IStatus.ERROR, Package.PLUGIN_ID, Package.STATUS_DATABASE_TOO_LARGE,
- NLS.bind("Database too large! Address = " + address + ", max size = " + MAX_DB_SIZE, bindings), //$NON-NLS-1$ //$NON-NLS-2$
- null));
+ NLS.bind("Database too large! Address = " + endAddress + ", max size = " + MAX_DB_SIZE, //$NON-NLS-1$ //$NON-NLS-2$
+ bindings), null));
}
- return address;
- }
- }
- /**
- * For testing purposes, only.
- */
- private long createNewChunks(int numChunks) throws IndexException {
- assert this.fExclusiveLock;
- synchronized (this.fCache) {
- final int oldLen= this.fChunks.length;
- Chunk[] newchunks = new Chunk[oldLen + numChunks];
- System.arraycopy(this.fChunks, 0, newchunks, 0, oldLen);
- final Chunk chunk= new Chunk(this, oldLen + numChunks - 1);
- chunk.fDirty= true;
- newchunks[ oldLen + numChunks - 1 ] = chunk;
- this.fChunks= newchunks;
- this.fCache.add(chunk, true);
- this.fChunksAllocated=oldLen + numChunks;
- this.fChunksUsed=oldLen + numChunks;
- return (long) (oldLen + numChunks - 1) * CHUNK_SIZE;
+ return firstChunkIndex;
}
}
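
createNewChunks() above grows the in-memory chunk array by at least 1024 entries or about five percent, whichever is larger, but never by less than the requested range needs. A standalone sketch of that growth policy:

// Standalone sketch of the chunk-array growth policy in createNewChunks() above.
public class ChunkArrayGrowthSketch {
    static int newCapacity(int currentCapacity, int lastChunkIndex) {
        int increment = Math.max(1024, currentCapacity / 20);               // at least 1024 entries, or ~5%
        return Math.max(lastChunkIndex + 1, currentCapacity + increment);   // and always room for the new last chunk
    }

    public static void main(String[] args) {
        System.out.println(newCapacity(1, 2047));        // 2048: one big allocation can force a larger jump
        System.out.println(newCapacity(40_000, 40_100)); // 42000: steady growth is ~5% once the array is large
    }
}
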
@@ -544,26 +895,74 @@ public class Database {
/**
* Free an allocated block.
*
- * @param address memory address to be freed
- * @param poolId the same ID that was previously passed into malloc when allocating this memory address
+ * @param address
+ * memory address to be freed
+ * @param poolId
+ * the same ID that was previously passed into malloc when allocating this memory address
*/
public void free(long address, short poolId) throws IndexException {
assert this.fExclusiveLock;
if (address == 0) {
return;
}
- // TODO Look for opportunities to merge blocks
+ long blockSize;
long block = address - BLOCK_HEADER_SIZE;
Chunk chunk = getChunk(block);
- int blocksize = - chunk.getShort(block);
- if (blocksize < 0) {
- // Already freed.
- throw new IndexException(new Status(IStatus.ERROR, Package.PLUGIN_ID, 0,
- "Already freed record " + address, new Exception())); //$NON-NLS-1$
+ blockSize = -chunk.getShort(block);
+ // We use a block size of 0 to indicate a large block that fills a range of chunks
+ if (blockSize == 0) {
+ int offsetIntoChunk = (int) (address % CHUNK_SIZE);
+ assert offsetIntoChunk == LargeBlock.HEADER_SIZE + BLOCK_HEADER_SIZE;
+ // Deallocating a large block
+ // This was a large block. It uses a sequence of full chunks.
+ int chunkNum = (int) (address / CHUNK_SIZE);
+ int numChunks = -getBlockHeaderForChunkNum(chunkNum);
+ if (numChunks < 0) {
+ // Already freed.
+ throw new IndexException(new Status(IStatus.ERROR, Package.PLUGIN_ID, 0,
+ "Already freed large block " + address, new Exception())); //$NON-NLS-1$
+ }
+ blockSize = numChunks * CHUNK_SIZE;
+ freeLargeChunk(chunkNum, numChunks);
+ } else {
+ // Deallocating a normal block
+ // TODO Look for opportunities to merge small blocks
+ if (blockSize < 0) {
+ // Already freed.
+ throw new IndexException(new Status(IStatus.ERROR, Package.PLUGIN_ID, 0,
+ "Already freed record " + address, new Exception())); //$NON-NLS-1$
+ }
+ addBlock(chunk, (int) blockSize, block);
}
- addBlock(chunk, blocksize, block);
- this.freed += blocksize;
- this.memoryUsage.recordFree(poolId, blocksize);
+
+ this.freed += blockSize;
+ this.memoryUsage.recordFree(poolId, blockSize);
+ }
+
+ private void freeLargeChunk(int chunkNum, int numChunks) {
+ assert chunkNum > 0;
+ assert numChunks > 0;
+ int prevBlockHeader = getBlockFooterForChunkBefore(chunkNum);
+ int nextBlockChunkNum = chunkNum + numChunks;
+ int nextBlockHeader = getBlockHeaderForChunkNum(nextBlockChunkNum);
+
+ // If the previous block is unused, merge with it
+ if (prevBlockHeader > 0) {
+ int prevBlockChunkNum = getFirstChunkOfBlockBefore(chunkNum);
+
+ unlinkFreeBlock(prevBlockChunkNum);
+ chunkNum = prevBlockChunkNum;
+ numChunks += prevBlockHeader;
+ }
+
+ // If the next block is unused, merge with it
+ if (nextBlockHeader > 0) {
+ unlinkFreeBlock(nextBlockChunkNum);
+ numChunks += nextBlockHeader;
+ }
+
+ // Block merging is done. Now reinsert the merged block into the free space trie
+ linkFreeBlockToTrie(chunkNum, numChunks);
}
public void putByte(long offset, byte value) throws IndexException {
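
freeLargeChunk() above coalesces the freed block with a free neighbour on either side before relinking it into the trie: the footer of the preceding block and the header of the following block are both signed chunk counts, and a positive value marks a free block. A standalone sketch of just the merge arithmetic:

// Standalone sketch of the merge arithmetic in freeLargeChunk() above; not the Database class itself.
public class LargeBlockMergeSketch {
    /**
     * @param chunkNum        first chunk of the block being freed
     * @param numChunks       size of the block being freed, in chunks
     * @param prevBlockFooter signed chunk count of the preceding block (positive means free), or 0 if none
     * @param nextBlockHeader signed chunk count of the following block (positive means free), or 0 if none
     * @return the first chunk and size, in chunks, of the merged free block
     */
    static int[] merge(int chunkNum, int numChunks, int prevBlockFooter, int nextBlockHeader) {
        if (prevBlockFooter > 0) {  // previous block is free: extend backwards over it
            chunkNum -= prevBlockFooter;
            numChunks += prevBlockFooter;
        }
        if (nextBlockHeader > 0) {  // next block is free: extend forwards over it
            numChunks += nextBlockHeader;
        }
        return new int[] { chunkNum, numChunks };
    }

    public static void main(String[] args) {
        // Freeing a 3-chunk block at chunk 20, with a free 2-chunk block before it and an allocated block after it:
        int[] merged = merge(20, 3, 2, -5);
        System.out.println(merged[0] + " x " + merged[1]); // 18 x 5: one free block covering chunks 18..22
    }
}
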
@@ -745,7 +1144,7 @@ public class Database {
this.memoryUsage.refresh();
this.fHeaderChunk.fDirty= false;
this.fChunks= new Chunk[] { null };
- this.fChunksUsed = this.fChunksAllocated = this.fChunks.length;
+ this.fChunksUsed = this.fChunks.length;
try {
this.fFile.close();
} catch (IOException e) {
@@ -931,6 +1330,10 @@ public class Database {
return this.fFile.length();
}
+ public int getChunkCount() {
+ return this.fChunksUsed;
+ }
+
/**
* A Record Pointer is a pointer as returned by Database.malloc().
* This is a pointer to a block + BLOCK_HEADER_SIZE.
@@ -953,4 +1356,19 @@ public class Database {
public MemoryStats getMemoryStats() {
return this.memoryUsage;
}
+
+ /**
+ * Returns the number of bytes that can fit in the payload of the given number of chunks.
+ */
+ public static long getBytesThatFitInChunks(int numChunks) {
+ return CHUNK_SIZE * numChunks - LargeBlock.HEADER_SIZE - LargeBlock.FOOTER_SIZE - BLOCK_HEADER_SIZE;
+ }
+
+ /**
+ * Returns the number of chunks needed to fit the given number of bytes of payload.
+ */
+ public static int getChunksNeededForBytes(long datasize) {
+ return divideRoundingUp(datasize + BLOCK_HEADER_SIZE + LargeBlock.HEADER_SIZE + LargeBlock.FOOTER_SIZE,
+ CHUNK_SIZE);
+ }
}
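
The two helpers added at the end of Database above convert between a payload size and the number of whole chunks a large block needs, accounting for the block header plus the LargeBlock header and footer. A standalone round-trip example; the constant values used here are illustrative assumptions, not the real ones:

// Illustrative round trip of the two helpers above; the constant values are assumptions, not the real ones.
public class ChunkArithmeticSketch {
    static final int CHUNK_SIZE = 4096;        // assumed
    static final int BLOCK_HEADER_SIZE = 2;    // assumed
    static final int LARGE_HEADER_SIZE = 8;    // assumed stand-in for LargeBlock.HEADER_SIZE
    static final int LARGE_FOOTER_SIZE = 8;    // assumed stand-in for LargeBlock.FOOTER_SIZE

    static long bytesThatFitInChunks(int numChunks) {
        return (long) CHUNK_SIZE * numChunks - LARGE_HEADER_SIZE - LARGE_FOOTER_SIZE - BLOCK_HEADER_SIZE;
    }

    static int chunksNeededForBytes(long datasize) {
        long total = datasize + BLOCK_HEADER_SIZE + LARGE_HEADER_SIZE + LARGE_FOOTER_SIZE;
        return (int) ((total + CHUNK_SIZE - 1) / CHUNK_SIZE); // divideRoundingUp
    }

    public static void main(String[] args) {
        long payload = 100_000;
        int chunks = chunksNeededForBytes(payload);
        System.out.println(payload + " bytes -> " + chunks + " chunks, whose payload capacity is "
                + bytesThatFitInChunks(chunks) + " bytes");
    }
}
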
diff --git a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/LargeBlock.java b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/LargeBlock.java
new file mode 100644
index 000000000..26af53cdc
--- /dev/null
+++ b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/LargeBlock.java
@@ -0,0 +1,26 @@
+package org.eclipse.jdt.internal.core.nd.db;
+
+public class LargeBlock {
+ public static final int SIZE_OFFSET = 0;
+ public static final int SIZE_OF_SIZE_FIELD = Database.INT_SIZE;
+ /**
+ * Size of the header for a large block. The header consists of an int which holds the number of chunks in the block.
+ * It is negative for an allocated block and positive for an unallocated block. The header is located at the start
+ * of the large block.
+ */
+ public static final int HEADER_SIZE = Math.max(Database.INT_SIZE, Database.BLOCK_SIZE_DELTA);
+
+ public static final int ENTRIES_IN_CHILD_TABLE = SIZE_OF_SIZE_FIELD * 8;
+ public static final int CHILD_TABLE_OFFSET = HEADER_SIZE;
+ public static final int PARENT_OFFSET = CHILD_TABLE_OFFSET + (Database.INT_SIZE * ENTRIES_IN_CHILD_TABLE);
+ public static final int PREV_BLOCK_OFFSET = PARENT_OFFSET + Database.INT_SIZE;
+ public static final int NEXT_BLOCK_OFFSET = PREV_BLOCK_OFFSET + Database.INT_SIZE;
+
+ public static final int UNALLOCATED_HEADER_SIZE = NEXT_BLOCK_OFFSET + Database.INT_SIZE;
+
+ /**
+ * The large block footer is located at the end of the last chunk in the large block. It is an exact copy of the
+ * header.
+ */
+ public static final int FOOTER_SIZE = HEADER_SIZE;
+}
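
The child table declared above has one slot per bit of the size field, and Database.insertChild()/getSmallestChildNoSmallerThan() pick a slot from the highest bit at which two block sizes differ. A standalone sketch of that slot computation, assuming Database.INT_SIZE is 4 bytes:

// Standalone sketch of the child-slot computation used by the free-space trie; assumes a 4-byte size field.
public class TrieChildSlotSketch {
    static final int SIZE_OF_SIZE_FIELD = 4; // bytes; stand-in for LargeBlock.SIZE_OF_SIZE_FIELD (Database.INT_SIZE)

    /** Index into the child table (0 .. ENTRIES_IN_CHILD_TABLE - 1) of the highest bit where the two sizes differ. */
    static int childSlot(int parentSizeInChunks, int childSizeInChunks) {
        int difference = parentSizeInChunks ^ childSizeInChunks;
        return SIZE_OF_SIZE_FIELD * 8 - Integer.numberOfLeadingZeros(difference) - 1;
    }

    public static void main(String[] args) {
        // 12 = 0b1100 and 10 = 0b1010 differ first in bit 2, so a 10-chunk child of a 12-chunk node uses slot 2:
        System.out.println(childSlot(12, 10)); // 2
        // Sizes that agree in all high bits land in low-numbered slots:
        System.out.println(childSlot(12, 13)); // 0
    }
}
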
diff --git a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/LongString.java b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/LongString.java
index c78b7f909..eb0e48af3 100644
--- a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/LongString.java
+++ b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/LongString.java
@@ -29,13 +29,13 @@ public class LongString implements IString {
private static final int NEXT1 = 4;
private static final int CHARS1 = 8;
- private static final int NUM_CHARS1 = (Database.MAX_MALLOC_SIZE - CHARS1) / 2;
+ private static final int NUM_CHARS1 = (Database.MAX_SINGLE_BLOCK_MALLOC_SIZE - CHARS1) / 2;
// Additional fields of subsequent records.
private static final int NEXTN = 0;
private static final int CHARSN = 4;
- private static final int NUM_CHARSN = (Database.MAX_MALLOC_SIZE - CHARSN) / 2;
+ private static final int NUM_CHARSN = (Database.MAX_SINGLE_BLOCK_MALLOC_SIZE - CHARSN) / 2;
public LongString(Database db, long record) {
this.db = db;
@@ -47,7 +47,7 @@ public class LongString implements IString {
final int numCharsn = useBytes ? NUM_CHARSN * 2 : NUM_CHARSN;
this.db = db;
- this.record = db.malloc(Database.MAX_MALLOC_SIZE, Database.POOL_STRING_LONG);
+ this.record = db.malloc(Database.MAX_SINGLE_BLOCK_MALLOC_SIZE, Database.POOL_STRING_LONG);
// Write the first record.
final int length = chars.length;
@@ -64,7 +64,7 @@ public class LongString implements IString {
long lastNext = this.record + NEXT1;
int start = numChars1;
while (length - start > numCharsn) {
- long nextRecord = db.malloc(Database.MAX_MALLOC_SIZE, Database.POOL_STRING_LONG);
+ long nextRecord = db.malloc(Database.MAX_SINGLE_BLOCK_MALLOC_SIZE, Database.POOL_STRING_LONG);
db.putRecPtr(lastNext, nextRecord);
chunk= db.getChunk(nextRecord);
if (useBytes) {
diff --git a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/ShortString.java b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/ShortString.java
index 09992a564..eb56c6c5e 100644
--- a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/ShortString.java
+++ b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/db/ShortString.java
@@ -25,7 +25,7 @@ public class ShortString implements IString {
private static final int LENGTH = 0;
private static final int CHARS = 4;
- public static final int MAX_BYTE_LENGTH = Database.MAX_MALLOC_SIZE - CHARS;
+ public static final int MAX_BYTE_LENGTH = Database.MAX_SINGLE_BLOCK_MALLOC_SIZE - CHARS;
public ShortString(Database db, long offset) {
this.db = db;
diff --git a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/indexer/ClassFileToIndexConverter.java b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/indexer/ClassFileToIndexConverter.java
index afd740ee0..d52ce330e 100644
--- a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/indexer/ClassFileToIndexConverter.java
+++ b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/indexer/ClassFileToIndexConverter.java
@@ -16,9 +16,7 @@ import java.util.List;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IProgressMonitor;
-import org.eclipse.jdt.core.IClassFile;
import org.eclipse.jdt.core.compiler.CharOperation;
-import org.eclipse.jdt.internal.compiler.classfmt.ClassFormatException;
import org.eclipse.jdt.internal.compiler.codegen.AnnotationTargetTypeConstants;
import org.eclipse.jdt.internal.compiler.env.ClassSignature;
import org.eclipse.jdt.internal.compiler.env.EnumConstantSignature;
@@ -69,8 +67,6 @@ import org.eclipse.jdt.internal.core.nd.java.NdTypeInterface;
import org.eclipse.jdt.internal.core.nd.java.NdTypeParameter;
import org.eclipse.jdt.internal.core.nd.java.NdTypeSignature;
import org.eclipse.jdt.internal.core.nd.java.NdVariable;
-import org.eclipse.jdt.internal.core.nd.java.model.BinaryTypeDescriptor;
-import org.eclipse.jdt.internal.core.nd.java.model.BinaryTypeFactory;
import org.eclipse.jdt.internal.core.nd.util.CharArrayUtils;
import org.eclipse.jdt.internal.core.util.CharArrayBuffer;
import org.eclipse.jdt.internal.core.util.Util;
@@ -97,12 +93,6 @@ public final class ClassFileToIndexConverter {
return this.resource.getNd();
}
- public static IBinaryType getTypeFromClassFile(IClassFile iClassFile, IProgressMonitor monitor)
- throws CoreException, ClassFormatException {
- BinaryTypeDescriptor descriptor = BinaryTypeFactory.createDescriptor(iClassFile);
- return BinaryTypeFactory.rawReadType(descriptor, true);
- }
-
/**
* Create a type info from the given class file in a jar and adds it to the given list of infos.
*
diff --git a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/indexer/Indexer.java b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/indexer/Indexer.java
index 7ca27245d..23807803c 100644
--- a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/indexer/Indexer.java
+++ b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/indexer/Indexer.java
@@ -10,7 +10,7 @@
*******************************************************************************/
package org.eclipse.jdt.internal.core.nd.indexer;
-import java.io.File;
+import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.ArrayList;
@@ -567,14 +567,6 @@ public final class Indexer {
String pathString = thePath.toString();
JavaIndex javaIndex = JavaIndex.getIndex(this.nd);
- File theFile = thePath.toFile();
- if (!(theFile.exists() && theFile.isFile())) {
- if (DEBUG) {
- Package.log("the file " + pathString + " does not exist", null); //$NON-NLS-1$ //$NON-NLS-2$
- }
- return 0;
- }
-
NdResourceFile resourceFile;
this.nd.acquireWriteLock(subMonitor.split(5));
@@ -596,9 +588,11 @@ public final class Indexer {
if (DEBUG) {
Package.logInfo("rescanning " + thePath.toString() + ", " + fingerprint); //$NON-NLS-1$ //$NON-NLS-2$
}
- int result;
+ int result = 0;
try {
- result = addElement(resourceFile, element, subMonitor.split(50));
+ if (fingerprint.fileExists()) {
+ result = addElement(resourceFile, element, subMonitor.split(50));
+ }
} catch (JavaModelException e) {
if (DEBUG) {
Package.log("the file " + pathString + " cannot be indexed due to a recoverable error", null); //$NON-NLS-1$ //$NON-NLS-2$
@@ -618,6 +612,12 @@ public final class Indexer {
Package.log("A RuntimeException occurred while indexing " + pathString, e); //$NON-NLS-1$
}
throw e;
+ } catch (FileNotFoundException e) {
+ fingerprint = FileFingerprint.getEmpty();
+ }
+
+ if (DEBUG && !fingerprint.fileExists()) {
+ Package.log("the file " + pathString + " was not indexed because it does not exist", null); //$NON-NLS-1$ //$NON-NLS-2$
}
List<NdResourceFile> allResourcesWithThisPath = Collections.emptyList();
@@ -655,9 +655,10 @@ public final class Indexer {
/**
* Adds an archive to the index, under the given NdResourceFile.
+ * @throws FileNotFoundException if the file does not exist
*/
private int addElement(NdResourceFile resourceFile, IJavaElement element, IProgressMonitor monitor)
- throws JavaModelException {
+ throws JavaModelException, FileNotFoundException {
SubMonitor subMonitor = SubMonitor.convert(monitor);
if (element instanceof JarPackageFragmentRoot) {
@@ -711,6 +712,8 @@ public final class Indexer {
} catch (ZipException e) {
Package.log("The zip file " + jarRoot.getPath() + " was corrupt", e); //$NON-NLS-1$//$NON-NLS-2$
// Indicates a corrupt zip file. Treat this like an empty zip file.
+ } catch (FileNotFoundException e) {
+ throw e;
} catch (IOException ioException) {
throw new JavaModelException(ioException, IJavaModelStatusConstants.IO_EXCEPTION);
} catch (CoreException coreException) {
@@ -729,7 +732,7 @@ public final class Indexer {
boolean indexed = false;
try {
- ClassFileReader classFileReader = BinaryTypeFactory.rawReadType(descriptor, true);
+ ClassFileReader classFileReader = BinaryTypeFactory.rawReadTypeTestForExists(descriptor, true, false);
if (classFileReader != null) {
indexed = addClassToIndex(resourceFile, descriptor.fieldDescriptor, descriptor.indexPath,
classFileReader, iterationMonitor);
diff --git a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/java/FileFingerprint.java b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/java/FileFingerprint.java
index f1b02622f..b5d606247 100644
--- a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/java/FileFingerprint.java
+++ b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/java/FileFingerprint.java
@@ -139,6 +139,15 @@ public class FileFingerprint {
}
/**
+ * Returns true iff the file existed at the time the fingerprint was computed.
+ *
+ * @return true iff the file existed at the time the fingerprint was computed.
+ */
+ public boolean fileExists() {
+ return !equals(EMPTY);
+ }
+
+ /**
* Compares the given File with the receiver. If the fingerprint matches (ie: the file
*/
public FingerprintTestResult test(IPath path, IProgressMonitor monitor) throws CoreException {
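
Together with the Indexer change earlier in this commit, fileExists() lets a missing file be represented as an empty fingerprint instead of being pre-checked on disk. A standalone sketch of that guard; the interfaces here are illustrative stand-ins for the real Indexer and FileFingerprint types:

import java.io.FileNotFoundException;

// Standalone sketch of the guard the Indexer now applies; the interfaces are illustrative stand-ins.
public class FingerprintGuardSketch {
    interface Fingerprint { boolean fileExists(); }
    interface IndexAction { int run() throws FileNotFoundException; }

    static int indexIfPresent(Fingerprint fingerprint, IndexAction addElement) {
        int result = 0;
        try {
            if (fingerprint.fileExists()) {
                result = addElement.run(); // addElement(...) in the real Indexer
            }
        } catch (FileNotFoundException e) {
            // The file disappeared while it was being read: treat it the same as a missing file.
            result = 0;
        }
        return result;
    }

    public static void main(String[] args) {
        System.out.println(indexIfPresent(() -> false, () -> 42)); // 0: nothing indexed for a missing file
        System.out.println(indexIfPresent(() -> true, () -> 42));  // 42: the file was indexed
    }
}
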
diff --git a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/java/JavaIndex.java b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/java/JavaIndex.java
index f0cbe1f53..778d5da52 100644
--- a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/java/JavaIndex.java
+++ b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/java/JavaIndex.java
@@ -34,9 +34,9 @@ import org.eclipse.jdt.internal.core.nd.util.CharArrayUtils;
public class JavaIndex {
// Version constants
- static final int CURRENT_VERSION = Nd.version(1, 37);
- static final int MAX_SUPPORTED_VERSION = Nd.version(1, 37);
- static final int MIN_SUPPORTED_VERSION = Nd.version(1, 37);
+ static final int CURRENT_VERSION = Nd.version(1, 38);
+ static final int MAX_SUPPORTED_VERSION = Nd.version(1, 38);
+ static final int MIN_SUPPORTED_VERSION = Nd.version(1, 38);
// Fields for the search header
public static final FieldSearchIndex<NdResourceFile> FILES;
diff --git a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/java/model/BinaryTypeFactory.java b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/java/model/BinaryTypeFactory.java
index e9ce570ad..631c3847a 100644
--- a/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/java/model/BinaryTypeFactory.java
+++ b/org.eclipse.jdt.core/search/org/eclipse/jdt/internal/core/nd/java/model/BinaryTypeFactory.java
@@ -10,15 +10,19 @@
*******************************************************************************/
package org.eclipse.jdt.internal.core.nd.java.model;
+import java.io.FileNotFoundException;
import java.io.IOException;
+import java.io.InputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
import org.eclipse.core.resources.IFile;
+import org.eclipse.core.resources.IResourceStatus;
import org.eclipse.core.resources.ResourcesPlugin;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
+import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Path;
import org.eclipse.jdt.core.IClassFile;
import org.eclipse.jdt.core.IJavaElement;
@@ -112,9 +116,8 @@ public class BinaryTypeFactory {
* no such type exists.
* @throws ClassFormatException
*/
- public static IBinaryType readType(BinaryTypeDescriptor descriptor,
- IProgressMonitor monitor) throws JavaModelException, ClassFormatException {
-
+ public static IBinaryType readType(BinaryTypeDescriptor descriptor, IProgressMonitor monitor) throws JavaModelException, ClassFormatException {
+
if (JavaIndex.isEnabled()) {
try {
return readFromIndex(JavaIndex.getIndex(), descriptor, monitor);
@@ -126,6 +129,14 @@ public class BinaryTypeFactory {
return rawReadType(descriptor, true);
}
+ public static ClassFileReader rawReadType(BinaryTypeDescriptor descriptor, boolean fullyInitialize) throws JavaModelException, ClassFormatException {
+ try {
+ return rawReadTypeTestForExists(descriptor, fullyInitialize, true);
+ } catch (FileNotFoundException e) {
+ throw new JavaModelException(e, IJavaModelStatusConstants.IO_EXCEPTION);
+ }
+ }
+
/**
* Read the class file from disk, circumventing the index's cache. This should only be used by callers
* that need to read information from the class file which aren't present in the index (such as method bodies).
@@ -133,15 +144,18 @@ public class BinaryTypeFactory {
* @return the newly-created ClassFileReader or null if the given class file does not exist.
* @throws ClassFormatException if the class file existed but was corrupt
* @throws JavaModelException if unable to read the class file due to a transient failure
+ * @throws FileNotFoundException if the file does not exist
*/
- public static ClassFileReader rawReadType(BinaryTypeDescriptor descriptor, boolean fullyInitialize) throws JavaModelException, ClassFormatException {
+ public static ClassFileReader rawReadTypeTestForExists(BinaryTypeDescriptor descriptor, boolean fullyInitialize,
+ boolean useInvalidArchiveCache) throws JavaModelException, ClassFormatException, FileNotFoundException {
if (descriptor == null) {
return null;
}
if (descriptor.isInJarFile()) {
ZipFile zip = null;
try {
- zip = JavaModelManager.getJavaModelManager().getZipFile(new Path(new String(descriptor.workspacePath)));
+ zip = JavaModelManager.getJavaModelManager().getZipFile(new Path(new String(descriptor.workspacePath)),
+ useInvalidArchiveCache);
char[] entryNameCharArray = CharArrayUtils.concat(
JavaNames.fieldDescriptorToBinaryName(descriptor.fieldDescriptor), SuffixConstants.SUFFIX_class);
String entryName = new String(entryNameCharArray);
@@ -162,7 +176,18 @@ public class BinaryTypeFactory {
}
} else {
IFile file = ResourcesPlugin.getWorkspace().getRoot().getFile(new Path(new String(descriptor.workspacePath)));
- byte[] contents = Util.getResourceContentsAsByteArray(file);
+ byte[] contents;
+ try (InputStream stream = file.getContents(true)) {
+ contents = org.eclipse.jdt.internal.compiler.util.Util.getInputStreamAsByteArray(stream, -1);
+ } catch (CoreException e) {
+ IStatus status = e.getStatus();
+ if (status.getCode() == IResourceStatus.RESOURCE_NOT_FOUND) {
+ throw new FileNotFoundException();
+ }
+ throw new JavaModelException(e);
+ } catch (IOException e) {
+ throw new JavaModelException(e, IJavaModelStatusConstants.IO_EXCEPTION);
+ }
return new ClassFileReader(contents, file.getFullPath().toString().toCharArray(), fullyInitialize);
}
return null;
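
The renamed rawReadTypeTestForExists() surfaces a missing class file as a FileNotFoundException, while rawReadType() keeps its old contract by translating that back into a model exception. A standalone sketch of that wrapper pattern, using illustrative stand-in types:

import java.io.FileNotFoundException;

// Standalone sketch of the wrapper pattern above; ModelException stands in for JavaModelException.
public class ReadTypeWrapperSketch {
    static class ModelException extends Exception {
        ModelException(Throwable cause) { super(cause); }
    }

    /** Low-level variant: a missing file propagates as FileNotFoundException, as in rawReadTypeTestForExists(). */
    static byte[] readTestForExists(boolean fileExists) throws FileNotFoundException {
        if (!fileExists) {
            throw new FileNotFoundException();
        }
        return new byte[0]; // stand-in for the class file contents
    }

    /** Original entry point: keeps its old contract by wrapping the missing-file case, as in rawReadType(). */
    static byte[] read(boolean fileExists) throws ModelException {
        try {
            return readTestForExists(fileExists);
        } catch (FileNotFoundException e) {
            throw new ModelException(e);
        }
    }

    public static void main(String[] args) throws Exception {
        System.out.println(read(true).length); // 0: read succeeded
        try {
            read(false);
        } catch (ModelException e) {
            System.out.println("wrapped: " + e.getCause().getClass().getSimpleName());
        }
    }
}
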
