Diffstat (limited to 'src/library')
-rw-r--r--  src/library/scala/collection/immutable/Vector.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ArrayStack.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/FlatHashTable.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/HashTable.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/ObservableBuffer.scala | 6
-rw-r--r--  src/library/scala/collection/mutable/ObservableMap.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ObservableSet.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/OpenHashMap.scala | 4
-rw-r--r--  src/library/scala/collection/mutable/PriorityQueue.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/RevertibleHistory.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 8
-rw-r--r--  src/library/scala/collection/parallel/Tasks.scala | 32
-rw-r--r--  src/library/scala/collection/parallel/immutable/ParHashMap.scala | 2
-rw-r--r--  src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala | 2
-rw-r--r--  src/library/scala/compat/Platform.scala | 2
-rw-r--r--  src/library/scala/concurrent/Lock.scala | 4
-rw-r--r--  src/library/scala/concurrent/pilib.scala | 4
-rw-r--r--  src/library/scala/reflect/ScalaBeanInfo.scala | 2
-rw-r--r--  src/library/scala/sys/process/ProcessImpl.scala | 2
-rw-r--r--  src/library/scala/testing/Benchmark.scala | 4
-rw-r--r--  src/library/scala/util/control/Breaks.scala | 2
-rw-r--r--  src/library/scala/util/parsing/ast/Binders.scala | 4
-rw-r--r--  src/library/scala/xml/dtd/ContentModelParser.scala | 2
-rw-r--r--  src/library/scala/xml/dtd/Scanner.scala | 4
-rw-r--r--  src/library/scala/xml/parsing/MarkupParserCommon.scala | 6
-rw-r--r--  src/library/scala/xml/persistent/CachedFileStorage.scala | 6
-rwxr-xr-x  src/library/scala/xml/pull/XMLEventReader.scala | 2
27 files changed, 59 insertions(+), 59 deletions(-)
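Every hunk below applies the same Scala convention: a method that performs a side effect is declared with an explicit empty parameter list (def clear(): Unit) rather than as a parameterless method (def clear: Unit), so that call sites read clear() and the parameterless form stays reserved for pure accessors. A minimal sketch of that convention follows; the Counter class is illustrative only and is not part of this commit.

    // Side-effecting members get (), pure accessors do not.
    class Counter {
      private var n = 0
      def increment(): Unit = { n += 1 }   // mutates state: declared with ()
      def reset(): Unit = { n = 0 }        // mutates state: declared with ()
      def value: Int = n                   // pure read: no parentheses
    }

    object CounterDemo {
      def main(args: Array[String]): Unit = {
        val c = new Counter
        c.increment()        // call site mirrors the declaration
        println(c.value)     // reads like a field access
        c.reset()
      }
    }
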
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index 7a4a9bb18a..ab44585902 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -704,7 +704,7 @@ final class VectorBuilder[A]() extends Builder[A,Vector[A]] with VectorPointer[A
s
}
- def clear: Unit = {
+ def clear(): Unit = {
display0 = new Array[AnyRef](32)
depth = 1
blockIndex = 0
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index 2323830b7b..012105d7c4 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -143,7 +143,7 @@ extends Seq[T]
def dup() = push(top)
/** Empties the stack. */
- def clear {
+ def clear() {
index = 0
table = new Array(1)
}
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index d78c466f69..f2e42f2ca1 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -241,7 +241,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
// discards the previous sizemap and populates the new one
- protected def sizeMapInitAndRebuild {
+ protected def sizeMapInitAndRebuild() {
// first allocate
sizeMapInit(table.length)
@@ -263,7 +263,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
}
- private[collection] def printSizeMap {
+ private[collection] def printSizeMap() {
println(sizemap.toList)
}
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index dea1ec7d44..0f6fde0260 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -265,7 +265,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
}
// discards the previous sizemap and populates the new one
- protected def sizeMapInitAndRebuild {
+ protected def sizeMapInitAndRebuild() {
sizeMapInit(table.length)
// go through the buckets, count elements
@@ -291,7 +291,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
}
}
- private[collection] def printSizeMap {
+ private[collection] def printSizeMap() {
println(sizemap.toList)
}
diff --git a/src/library/scala/collection/mutable/ObservableBuffer.scala b/src/library/scala/collection/mutable/ObservableBuffer.scala
index aa9bd7ac95..c38bf5fbf3 100644
--- a/src/library/scala/collection/mutable/ObservableBuffer.scala
+++ b/src/library/scala/collection/mutable/ObservableBuffer.scala
@@ -46,7 +46,7 @@ trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoa
val oldelement = apply(n)
super.update(n, newelement)
publish(new Update(Index(n), newelement) with Undoable {
- def undo { update(n, oldelement) }
+ def undo() { update(n, oldelement) }
})
}
@@ -54,7 +54,7 @@ trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoa
val oldelement = apply(n)
super.remove(n)
publish(new Remove(Index(n), oldelement) with Undoable {
- def undo { insert(n, oldelement) }
+ def undo() { insert(n, oldelement) }
})
oldelement
}
@@ -62,7 +62,7 @@ trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoa
abstract override def clear(): Unit = {
super.clear
publish(new Reset with Undoable {
- def undo { throw new UnsupportedOperationException("cannot undo") }
+ def undo() { throw new UnsupportedOperationException("cannot undo") }
})
}
}
diff --git a/src/library/scala/collection/mutable/ObservableMap.scala b/src/library/scala/collection/mutable/ObservableMap.scala
index 00087f8f61..ceb23d25c3 100644
--- a/src/library/scala/collection/mutable/ObservableMap.scala
+++ b/src/library/scala/collection/mutable/ObservableMap.scala
@@ -62,7 +62,7 @@ trait ObservableMap[A, B] extends Map[A, B] with Publisher[Message[(A, B)] with
abstract override def clear(): Unit = {
super.clear
publish(new Reset with Undoable {
- def undo: Unit = throw new UnsupportedOperationException("cannot undo")
+ def undo(): Unit = throw new UnsupportedOperationException("cannot undo")
})
}
}
diff --git a/src/library/scala/collection/mutable/ObservableSet.scala b/src/library/scala/collection/mutable/ObservableSet.scala
index 6bb1e45cec..1b375802a5 100644
--- a/src/library/scala/collection/mutable/ObservableSet.scala
+++ b/src/library/scala/collection/mutable/ObservableSet.scala
@@ -46,7 +46,7 @@ trait ObservableSet[A] extends Set[A] with Publisher[Message[A] with Undoable]
abstract override def clear(): Unit = {
super.clear
publish(new Reset with Undoable {
- def undo: Unit = throw new UnsupportedOperationException("cannot undo")
+ def undo(): Unit = throw new UnsupportedOperationException("cannot undo")
})
}
}
diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala
index f558f68159..72e729f370 100644
--- a/src/library/scala/collection/mutable/OpenHashMap.scala
+++ b/src/library/scala/collection/mutable/OpenHashMap.scala
@@ -80,7 +80,7 @@ extends Map[Key, Value]
h ^ (h >>> 7) ^ (h >>> 4);
}
- private[this] def growTable = {
+ private[this] def growTable() = {
val oldSize = mask + 1;
val newSize = 4 * oldSize;
val oldTable = table;
@@ -179,7 +179,7 @@ extends Map[Key, Value]
var index = 0;
val initialModCount = modCount;
- private[this] def advance {
+ private[this] def advance() {
if (initialModCount != modCount) sys.error("Concurrent modification");
while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1;
}
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index 2c03f329a7..31113797dd 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -253,7 +253,7 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
*/
override def clone(): PriorityQueue[A] = new PriorityQueue[A] ++= this.iterator
- // def printstate {
+ // def printstate() {
// println("-----------------------")
// println("Size: " + resarr.p_size0)
// println("Internal array: " + resarr.p_array.toList)
diff --git a/src/library/scala/collection/mutable/RevertibleHistory.scala b/src/library/scala/collection/mutable/RevertibleHistory.scala
index 2c51f70b8b..922824ddf0 100644
--- a/src/library/scala/collection/mutable/RevertibleHistory.scala
+++ b/src/library/scala/collection/mutable/RevertibleHistory.scala
@@ -28,7 +28,7 @@ class RevertibleHistory[Evt <: Undoable, Pub] extends History[Evt, Pub] with Und
/** Rollback the full history.
*/
- def undo: Unit = {
+ def undo(): Unit = {
val old = log.toList.reverse
clear
old.foreach { case (sub, event) => event.undo }
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index b32ea108f4..7c176eeee4 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -842,11 +842,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
extends NonDivisibleTask[R, Composite[FR, SR, R, First, Second]] {
def combineResults(fr: FR, sr: SR): R
@volatile var result: R = null.asInstanceOf[R]
- private[parallel] override def signalAbort {
+ private[parallel] override def signalAbort() {
ft.signalAbort
st.signalAbort
}
- protected def mergeSubtasks {
+ protected def mergeSubtasks() {
ft mergeThrowables st
if (throwable eq null) result = combineResults(ft.result, st.result)
}
@@ -883,7 +883,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def leaf(prevr: Option[R1]) = {
result = map(executeAndWaitResult(inner))
}
- private[parallel] override def signalAbort {
+ private[parallel] override def signalAbort() {
inner.signalAbort
}
override def requiresStrictSplitters = inner.requiresStrictSplitters
@@ -1395,7 +1395,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
}
import collection.DebugUtils._
- private[parallel] def printDebugBuffer = println(buildString {
+ private[parallel] def printDebugBuffer() = println(buildString {
append =>
for (s <- debugBuffer) {
append(s)
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index 497e0c638a..873291fb2d 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -57,7 +57,7 @@ trait Tasks {
// exception handling mechanism
@volatile var throwable: Throwable = null
- def forwardThrowable = if (throwable != null) throw throwable
+ def forwardThrowable() = if (throwable != null) throw throwable
// tries to do the leaf computation, storing the possible exception
private[parallel] def tryLeaf(lastres: Option[R]) {
@@ -101,7 +101,7 @@ trait Tasks {
}
// override in concrete task implementations to signal abort to other tasks
- private[parallel] def signalAbort {}
+ private[parallel] def signalAbort() {}
}
trait TaskImpl[R, +Tp] {
@@ -110,11 +110,11 @@ trait Tasks {
def split: Seq[TaskImpl[R, Tp]]
/** Code that gets called after the task gets started - it may spawn other tasks instead of calling `leaf`. */
- def compute
+ def compute()
/** Start task. */
- def start
+ def start()
/** Wait for task to finish. */
- def sync
+ def sync()
/** Try to cancel the task.
* @return `true` if cancellation is successful.
*/
@@ -126,7 +126,7 @@ trait Tasks {
*
* This method may be overridden.
*/
- def release {}
+ def release() {}
}
protected def newTaskImpl[R, Tp](b: Task[R, Tp]): TaskImpl[R, Tp]
@@ -161,9 +161,9 @@ trait AdaptiveWorkStealingTasks extends Tasks {
def split: Seq[TaskImpl[R, Tp]]
- def compute = if (body.shouldSplitFurther) internal else body.tryLeaf(None)
+ def compute() = if (body.shouldSplitFurther) internal else body.tryLeaf(None)
- def internal = {
+ def internal() = {
var last = spawnSubtasks()
last.body.tryLeaf(None)
@@ -231,7 +231,7 @@ trait ThreadPoolTasks extends Tasks {
@volatile var owned = false
@volatile var completed = false
- def start = synchronized {
+ def start() = synchronized {
// debuglog("Starting " + body)
// utb: future = executor.submit(this)
executor.synchronized {
@@ -239,7 +239,7 @@ trait ThreadPoolTasks extends Tasks {
executor.submit(this)
}
}
- def sync = synchronized {
+ def sync() = synchronized {
// debuglog("Syncing on " + body)
// utb: future.get()
executor.synchronized {
@@ -293,11 +293,11 @@ trait ThreadPoolTasks extends Tasks {
def queue = executor.getQueue.asInstanceOf[LinkedBlockingQueue[Runnable]]
@volatile var totaltasks = 0
- private def incrTasks = synchronized {
+ private def incrTasks() = synchronized {
totaltasks += 1
}
- private def decrTasks = synchronized {
+ private def decrTasks() = synchronized {
totaltasks -= 1
}
@@ -361,12 +361,12 @@ trait FutureThreadPoolTasks extends Tasks {
trait TaskImpl[R, +Tp] extends Runnable with super.TaskImpl[R, Tp] {
@volatile var future: Future[_] = null
- def start = {
+ def start() = {
executor.synchronized {
future = executor.submit(this)
}
}
- def sync = future.get
+ def sync() = future.get
def tryCancel = false
def run = {
compute
@@ -435,8 +435,8 @@ trait HavingForkJoinPool {
trait ForkJoinTasks extends Tasks with HavingForkJoinPool {
trait TaskImpl[R, +Tp] extends RecursiveAction with super.TaskImpl[R, Tp] {
- def start = fork
- def sync = join
+ def start() = fork
+ def sync() = join
def tryCancel = tryUnfork
}
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index b9b7cbd69d..85084a945f 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -115,7 +115,7 @@ self =>
override def toString = "HashTrieIterator(" + sz + ")"
}
- private[parallel] def printDebugInfo {
+ private[parallel] def printDebugInfo() {
println("Parallel hash trie")
println("Top level inner trie type: " + trie.getClass)
trie match {
diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
index a4dc9b4a14..dc583fb4e7 100644
--- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
@@ -56,7 +56,7 @@ extends Combiner[T, ParArray[T]] {
new ParArray(arrayseq)
}
- def clear {
+ def clear() {
buff.clear
}
diff --git a/src/library/scala/compat/Platform.scala b/src/library/scala/compat/Platform.scala
index b1c6c69145..8ee356e95f 100644
--- a/src/library/scala/compat/Platform.scala
+++ b/src/library/scala/compat/Platform.scala
@@ -67,7 +67,7 @@ object Platform {
def currentTime: Long = System.currentTimeMillis()
@inline
- def collectGarbage: Unit = System.gc()
+ def collectGarbage(): Unit = System.gc()
/** The name of the default character set encoding as a string */
@inline
diff --git a/src/library/scala/concurrent/Lock.scala b/src/library/scala/concurrent/Lock.scala
index 5494ff8c68..08c9f6cd63 100644
--- a/src/library/scala/concurrent/Lock.scala
+++ b/src/library/scala/concurrent/Lock.scala
@@ -18,12 +18,12 @@ package scala.concurrent
class Lock {
var available = true
- def acquire = synchronized {
+ def acquire() = synchronized {
while (!available) wait()
available = false
}
- def release = synchronized {
+ def release() = synchronized {
available = true
notify()
}
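
A usage sketch of scala.concurrent.Lock after this change; the LockDemo object is hypothetical. acquire() loops in wait() until the lock becomes available, and release() hands it back via notify().

    object LockDemo {
      def main(args: Array[String]): Unit = {
        val lock = new scala.concurrent.Lock
        lock.acquire()            // blocks while another thread holds the lock
        try println("in critical section")
        finally lock.release()    // sets available = true and wakes one waiter
      }
    }
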
diff --git a/src/library/scala/concurrent/pilib.scala b/src/library/scala/concurrent/pilib.scala
index cad8aecf86..7a3758b9c6 100644
--- a/src/library/scala/concurrent/pilib.scala
+++ b/src/library/scala/concurrent/pilib.scala
@@ -43,13 +43,13 @@ object pilib {
abstract class Spawn {
def <(p: => Unit): Spawn
def |(p: => Unit): Spawn
- def > : Unit
+ def > (): Unit
}
val spawn = new Spawn {
//object spawn extends Spawn { // BUG !
def <(p: => Unit): Spawn = { scala.concurrent.ops.spawn(p); this }
def |(p: => Unit): Spawn = { scala.concurrent.ops.spawn(p); this }
- def > : Unit = ()
+ def > (): Unit = ()
}
/////////////////////////// GUARDED PROCESSES //////////////////////////
diff --git a/src/library/scala/reflect/ScalaBeanInfo.scala b/src/library/scala/reflect/ScalaBeanInfo.scala
index 6b026907f4..bcb76c38bc 100644
--- a/src/library/scala/reflect/ScalaBeanInfo.scala
+++ b/src/library/scala/reflect/ScalaBeanInfo.scala
@@ -34,7 +34,7 @@ abstract class ScalaBeanInfo(clazz: java.lang.Class[_],
// override def getAdditionalBeanInfo() = Array(Introspector getBeanInfo clazz.getSuperclass)
- private def init {
+ private def init() {
var i = 0;
while (i < props.length) {
pd(i/3) = new PropertyDescriptor(props(i), clazz, props(i+1), props(i+2))
diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala
index 857c1af15e..b7549eeb06 100644
--- a/src/library/scala/sys/process/ProcessImpl.scala
+++ b/src/library/scala/sys/process/ProcessImpl.scala
@@ -28,7 +28,7 @@ private[process] trait ProcessImpl {
private[process] object Future {
def apply[T](f: => T): () => T = {
val result = new SyncVar[Either[Throwable, T]]
- def run: Unit =
+ def run(): Unit =
try result set Right(f)
catch { case e: Exception => result set Left(e) }
diff --git a/src/library/scala/testing/Benchmark.scala b/src/library/scala/testing/Benchmark.scala
index 34f769be11..c6ab992306 100644
--- a/src/library/scala/testing/Benchmark.scala
+++ b/src/library/scala/testing/Benchmark.scala
@@ -75,7 +75,7 @@ trait Benchmark {
* should not be measured. This method is run before each call to the
* benchmark payload, 'run'.
*/
- def setUp {
+ def setUp() {
}
/** Perform cleanup operations after each 'run'. For micro benchmarks,
@@ -84,7 +84,7 @@ trait Benchmark {
* write the results to a file. The execution time of this method is not
* measured.
*/
- def tearDown {
+ def tearDown() {
}
/** a string that is written at the beginning of the output line
diff --git a/src/library/scala/util/control/Breaks.scala b/src/library/scala/util/control/Breaks.scala
index 139fcc0bd6..c436df9f8f 100644
--- a/src/library/scala/util/control/Breaks.scala
+++ b/src/library/scala/util/control/Breaks.scala
@@ -57,7 +57,7 @@ class Breaks {
* @note this might be different than the statically closest enclosing
* block!
*/
- def break { throw breakException }
+ def break() { throw breakException }
}
/** An object that can be used for the break control abstraction.
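
A usage sketch of the break()/breakable pair after this change; BreaksDemo is hypothetical. break() throws the instance's breakException, which the matching breakable block catches.

    object BreaksDemo {
      def main(args: Array[String]): Unit = {
        val mybreaks = new scala.util.control.Breaks
        import mybreaks.{break, breakable}
        breakable {
          for (i <- 1 to 10) {
            if (i > 3) break()   // unwinds to the enclosing breakable { ... }
            println(i)
          }
        }
      }
    }
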
diff --git a/src/library/scala/util/parsing/ast/Binders.scala b/src/library/scala/util/parsing/ast/Binders.scala
index 494cb04166..a4f457e1bf 100644
--- a/src/library/scala/util/parsing/ast/Binders.scala
+++ b/src/library/scala/util/parsing/ast/Binders.scala
@@ -172,8 +172,8 @@ trait Binders extends AbstractSyntax with Mappable {
* the binding in the returned scope also does, and thus the check that all variables are bound is deferred until this scope is left **/
def nested: Scope[binderType] = this // TODO
- def onEnter {}
- def onLeft {}
+ def onEnter() {}
+ def onLeft() {}
}
diff --git a/src/library/scala/xml/dtd/ContentModelParser.scala b/src/library/scala/xml/dtd/ContentModelParser.scala
index 90255e966a..2d87bc0764 100644
--- a/src/library/scala/xml/dtd/ContentModelParser.scala
+++ b/src/library/scala/xml/dtd/ContentModelParser.scala
@@ -73,7 +73,7 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
sys.error("unexpected token:" + token2string(token) );
}
// sopt ::= S?
- def sOpt = if( token == S ) nextToken;
+ def sOpt() = if( token == S ) nextToken;
// (' S? mixed ::= '#PCDATA' S? ')'
// | '#PCDATA' (S? '|' S? atom)* S? ')*'
diff --git a/src/library/scala/xml/dtd/Scanner.scala b/src/library/scala/xml/dtd/Scanner.scala
index 20c0204e51..82a8d1af2f 100644
--- a/src/library/scala/xml/dtd/Scanner.scala
+++ b/src/library/scala/xml/dtd/Scanner.scala
@@ -33,7 +33,7 @@ class Scanner extends Tokens with parsing.TokenTests {
}
/** scans the next token */
- final def nextToken {
+ final def nextToken() {
if (token != END) token = readToken
}
@@ -41,7 +41,7 @@ class Scanner extends Tokens with parsing.TokenTests {
final def isIdentChar = ( ('a' <= c && c <= 'z')
|| ('A' <= c && c <= 'Z'));
- final def next = if (it.hasNext) c = it.next else c = ENDCH
+ final def next() = if (it.hasNext) c = it.next else c = ENDCH
final def acc(d: Char) {
if (c == d) next else sys.error("expected '"+d+"' found '"+c+"' !");
diff --git a/src/library/scala/xml/parsing/MarkupParserCommon.scala b/src/library/scala/xml/parsing/MarkupParserCommon.scala
index d2174c2879..d9729e14e1 100644
--- a/src/library/scala/xml/parsing/MarkupParserCommon.scala
+++ b/src/library/scala/xml/parsing/MarkupParserCommon.scala
@@ -201,13 +201,13 @@ private[scala] trait MarkupParserCommon extends TokenTests {
def xToken(that: Seq[Char]) { that foreach xToken }
/** scan [S] '=' [S]*/
- def xEQ = { xSpaceOpt; xToken('='); xSpaceOpt }
+ def xEQ() = { xSpaceOpt; xToken('='); xSpaceOpt }
/** skip optional space S? */
- def xSpaceOpt = while (isSpace(ch) && !eof) nextch
+ def xSpaceOpt() = while (isSpace(ch) && !eof) nextch
/** scan [3] S ::= (#x20 | #x9 | #xD | #xA)+ */
- def xSpace =
+ def xSpace() =
if (isSpace(ch)) { nextch; xSpaceOpt }
else xHandleError(ch, "whitespace expected")
diff --git a/src/library/scala/xml/persistent/CachedFileStorage.scala b/src/library/scala/xml/persistent/CachedFileStorage.scala
index 7f5b6b6310..1f355e29f1 100644
--- a/src/library/scala/xml/persistent/CachedFileStorage.scala
+++ b/src/library/scala/xml/persistent/CachedFileStorage.scala
@@ -33,7 +33,7 @@ extends java.lang.Thread with scala.util.logging.Logged {
*/
private var theFile: File = null
- private def switch = { theFile = if (theFile == file1) file2 else file1; }
+ private def switch() = { theFile = if (theFile == file1) file2 else file1; }
/** this storage modified since last modification check */
protected var dirty = false
@@ -82,7 +82,7 @@ extends java.lang.Thread with scala.util.logging.Logged {
}
/** saves the XML to file */
- private def save = if (this.dirty) {
+ private def save() = if (this.dirty) {
log("[save]\ndeleting "+theFile);
theFile.delete();
log("creating new "+theFile);
@@ -115,7 +115,7 @@ extends java.lang.Thread with scala.util.logging.Logged {
}
/** forces writing of contents to the file, even if there has not been any update. */
- def flush = {
+ def flush() = {
this.dirty = true;
save
}
diff --git a/src/library/scala/xml/pull/XMLEventReader.scala b/src/library/scala/xml/pull/XMLEventReader.scala
index 3902967626..2ffceab1b0 100755
--- a/src/library/scala/xml/pull/XMLEventReader.scala
+++ b/src/library/scala/xml/pull/XMLEventReader.scala
@@ -49,7 +49,7 @@ class XMLEventReader(src: Source) extends ProducerConsumerIterator[XMLEvent] {
// fails for whatever reason the iterator correctness is not impacted,
// only performance (because it will finish the entire XML document,
// or at least as much as it can fit in the queue.)
- def stop = {
+ def stop() = {
produce(POISON)
parserThread.interrupt()
}