Diffstat (limited to 'lib/hadoop-0.20.0/c++')
-rw-r--r--  lib/hadoop-0.20.0/c++/Linux-amd64-64/include/hadoop/Pipes.hh            |  258
-rw-r--r--  lib/hadoop-0.20.0/c++/Linux-amd64-64/include/hadoop/SerialUtils.hh      |  169
-rw-r--r--  lib/hadoop-0.20.0/c++/Linux-amd64-64/include/hadoop/StringUtils.hh      |   81
-rw-r--r--  lib/hadoop-0.20.0/c++/Linux-amd64-64/include/hadoop/TemplateFactory.hh  |   96
-rw-r--r--  lib/hadoop-0.20.0/c++/Linux-amd64-64/lib/libhadooppipes.a               |  bin 318270 -> 0 bytes
-rw-r--r--  lib/hadoop-0.20.0/c++/Linux-amd64-64/lib/libhadooputils.a               |  bin 88620 -> 0 bytes
-rw-r--r--  lib/hadoop-0.20.0/c++/Linux-i386-32/include/hadoop/Pipes.hh             |  258
-rw-r--r--  lib/hadoop-0.20.0/c++/Linux-i386-32/include/hadoop/SerialUtils.hh       |  169
-rw-r--r--  lib/hadoop-0.20.0/c++/Linux-i386-32/include/hadoop/StringUtils.hh       |   81
-rw-r--r--  lib/hadoop-0.20.0/c++/Linux-i386-32/include/hadoop/TemplateFactory.hh   |   96
-rw-r--r--  lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhadooppipes.a                |  bin 226390 -> 0 bytes
-rw-r--r--  lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhadooputils.a                |  bin 62576 -> 0 bytes
-rw-r--r--  lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhdfs.la                      |   41
-rw-r--r--  lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhdfs.so                      |  bin 41611 -> 0 bytes
-rw-r--r--  lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhdfs.so.0                    |  bin 41611 -> 0 bytes
-rw-r--r--  lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhdfs.so.0.0.0                |  bin 41611 -> 0 bytes
16 files changed, 0 insertions(+), 1249 deletions(-)
diff --git a/lib/hadoop-0.20.0/c++/Linux-amd64-64/include/hadoop/Pipes.hh b/lib/hadoop-0.20.0/c++/Linux-amd64-64/include/hadoop/Pipes.hh
deleted file mode 100644
index 9a785d966a..0000000000
--- a/lib/hadoop-0.20.0/c++/Linux-amd64-64/include/hadoop/Pipes.hh
+++ /dev/null
@@ -1,258 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef HADOOP_PIPES_HH
-#define HADOOP_PIPES_HH
-
-#ifdef SWIG
-%module (directors="1") HadoopPipes
-%include "std_string.i"
-%feature("director") Mapper;
-%feature("director") Reducer;
-%feature("director") Partitioner;
-%feature("director") RecordReader;
-%feature("director") RecordWriter;
-%feature("director") Factory;
-#else
-#include <string>
-#endif
-
-namespace HadoopPipes {
-
-/**
- * This defines the interface between application code and the C++
- * (foreign-code) side of Hadoop Map/Reduce.
- */
-
-/**
- * A JobConf defines the properties for a job.
- */
-class JobConf {
-public:
- virtual bool hasKey(const std::string& key) const = 0;
- virtual const std::string& get(const std::string& key) const = 0;
- virtual int getInt(const std::string& key) const = 0;
- virtual float getFloat(const std::string& key) const = 0;
- virtual bool getBoolean(const std::string& key) const = 0;
- virtual ~JobConf() {}
-};
-
-/**
- * Task context provides the information about the task and job.
- */
-class TaskContext {
-public:
- /**
- * Counter to keep track of a property and its value.
- */
- class Counter {
- private:
- int id;
- public:
- Counter(int counterId) : id(counterId) {}
- Counter(const Counter& counter) : id(counter.id) {}
-
- int getId() const { return id; }
- };
-
- /**
- * Get the JobConf for the current task.
- */
- virtual const JobConf* getJobConf() = 0;
-
- /**
- * Get the current key.
- * @return the current key
- */
- virtual const std::string& getInputKey() = 0;
-
- /**
- * Get the current value.
- * @return the current value
- */
- virtual const std::string& getInputValue() = 0;
-
- /**
- * Generate an output record
- */
- virtual void emit(const std::string& key, const std::string& value) = 0;
-
- /**
- * Mark your task as having made progress without changing the status
- * message.
- */
- virtual void progress() = 0;
-
- /**
- * Set the status message and call progress.
- */
- virtual void setStatus(const std::string& status) = 0;
-
- /**
- * Register a counter with the given group and name.
- */
- virtual Counter*
- getCounter(const std::string& group, const std::string& name) = 0;
-
- /**
- * Increment the value of the counter by the given amount.
- */
- virtual void incrementCounter(const Counter* counter, uint64_t amount) = 0;
-
- virtual ~TaskContext() {}
-};
-
-class MapContext: public TaskContext {
-public:
-
- /**
- * Access the InputSplit of the mapper.
- */
- virtual const std::string& getInputSplit() = 0;
-
- /**
- * Get the name of the key class of the input to this task.
- */
- virtual const std::string& getInputKeyClass() = 0;
-
- /**
- * Get the name of the value class of the input to this task.
- */
- virtual const std::string& getInputValueClass() = 0;
-
-};
-
-class ReduceContext: public TaskContext {
-public:
- /**
- * Advance to the next value.
- */
- virtual bool nextValue() = 0;
-};
-
-class Closable {
-public:
- virtual void close() {}
- virtual ~Closable() {}
-};
-
-/**
- * The application's mapper class to do map.
- */
-class Mapper: public Closable {
-public:
- virtual void map(MapContext& context) = 0;
-};
-
-/**
- * The application's reducer class to do reduce.
- */
-class Reducer: public Closable {
-public:
- virtual void reduce(ReduceContext& context) = 0;
-};
-
-/**
- * User code to decide where each key should be sent.
- */
-class Partitioner {
-public:
- virtual int partition(const std::string& key, int numOfReduces) = 0;
- virtual ~Partitioner() {}
-};
-
-/**
- * Applications that want to read the input for the map function directly
- * can define RecordReaders in C++.
- */
-class RecordReader: public Closable {
-public:
- virtual bool next(std::string& key, std::string& value) = 0;
-
- /**
- * The progress of the record reader through the split as a value between
- * 0.0 and 1.0.
- */
- virtual float getProgress() = 0;
-};
-
-/**
- * An object to write key/value pairs as they are emitted from the reduce.
- */
-class RecordWriter: public Closable {
-public:
- virtual void emit(const std::string& key,
- const std::string& value) = 0;
-};
-
-/**
- * A factory to create the necessary application objects.
- */
-class Factory {
-public:
- virtual Mapper* createMapper(MapContext& context) const = 0;
- virtual Reducer* createReducer(ReduceContext& context) const = 0;
-
- /**
- * Create a combiner, if this application has one.
- * @return the new combiner or NULL, if one is not needed
- */
- virtual Reducer* createCombiner(MapContext& context) const {
- return NULL;
- }
-
- /**
- * Create an application partitioner object.
- * @return the new partitioner or NULL, if the default partitioner should be
- * used.
- */
- virtual Partitioner* createPartitioner(MapContext& context) const {
- return NULL;
- }
-
- /**
- * Create an application record reader.
- * @return the new RecordReader or NULL, if the Java RecordReader should be
- * used.
- */
- virtual RecordReader* createRecordReader(MapContext& context) const {
- return NULL;
- }
-
- /**
- * Create an application record writer.
- * @return the new RecordWriter or NULL, if the Java RecordWriter should be
- * used.
- */
- virtual RecordWriter* createRecordWriter(ReduceContext& context) const {
- return NULL;
- }
-
- virtual ~Factory() {}
-};
-
-/**
- * Run the assigned task in the framework.
- * The user's main function should construct a Factory (for example a
- * TemplateFactory over its Mapper and Reducer classes) and pass it here.
- * @return true, if the task succeeded.
- */
-bool runTask(const Factory& factory);
-
-}
-
-#endif
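For context on the API being removed: a Pipes application subclasses Mapper and Reducer, wraps them in a Factory, and hands that to runTask(). Below is a minimal word-count-style sketch against this header; WordCountMap and WordCountReduce are hypothetical application classes, and the includes assume the install layout of the headers deleted in this commit.

    // Sketch only: WordCountMap/WordCountReduce are illustrative names.
    // Pipes.hh must be included before TemplateFactory.hh.
    #include <string>
    #include <vector>
    #include "hadoop/Pipes.hh"
    #include "hadoop/TemplateFactory.hh"
    #include "hadoop/StringUtils.hh"

    class WordCountMap : public HadoopPipes::Mapper {
    public:
      WordCountMap(HadoopPipes::TaskContext& context) {}
      void map(HadoopPipes::MapContext& context) {
        // Emit a count of 1 for every word in the input line.
        std::vector<std::string> words =
            HadoopUtils::splitString(context.getInputValue(), " ");
        for (size_t i = 0; i < words.size(); ++i) {
          context.emit(words[i], "1");
        }
      }
    };

    class WordCountReduce : public HadoopPipes::Reducer {
    public:
      WordCountReduce(HadoopPipes::TaskContext& context) {}
      void reduce(HadoopPipes::ReduceContext& context) {
        // Sum the counts for the current key.
        int sum = 0;
        while (context.nextValue()) {
          sum += HadoopUtils::toInt(context.getInputValue());
        }
        context.emit(context.getInputKey(), HadoopUtils::toString(sum));
      }
    };

    int main(int argc, char* argv[]) {
      // runTask() drives the wire protocol with the Java parent process.
      bool ok = HadoopPipes::runTask(
          HadoopPipes::TemplateFactory<WordCountMap, WordCountReduce>());
      return ok ? 0 : 1;
    }

Such binaries were linked against the libhadooppipes.a and libhadooputils.a archives whose removal is recorded below.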
diff --git a/lib/hadoop-0.20.0/c++/Linux-amd64-64/include/hadoop/SerialUtils.hh b/lib/hadoop-0.20.0/c++/Linux-amd64-64/include/hadoop/SerialUtils.hh
deleted file mode 100644
index 16cbab65b2..0000000000
--- a/lib/hadoop-0.20.0/c++/Linux-amd64-64/include/hadoop/SerialUtils.hh
+++ /dev/null
@@ -1,169 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef HADOOP_SERIAL_UTILS_HH
-#define HADOOP_SERIAL_UTILS_HH
-
-#include <string>
-
-namespace HadoopUtils {
-
- /**
- * A simple exception class that records a message for the user.
- */
- class Error {
- private:
- std::string error;
- public:
-
- /**
- * Create an error object with the given message.
- */
- Error(const std::string& msg);
-
- /**
- * Construct an error object with the given message that was created on
- * the given file, line, and function.
- */
- Error(const std::string& msg,
- const std::string& file, int line, const std::string& function);
-
- /**
- * Get the error message.
- */
- const std::string& getMessage() const;
- };
-
- /**
- * Check to make sure that the condition is true, and throw an exception
- * if it is not. The exception will contain the message and a description
- * of the source location.
- */
- #define HADOOP_ASSERT(CONDITION, MESSAGE) \
- { \
- if (!(CONDITION)) { \
- throw HadoopUtils::Error((MESSAGE), __FILE__, __LINE__, \
- __PRETTY_FUNCTION__); \
- } \
- }
-
- /**
- * An interface for an input stream.
- */
- class InStream {
- public:
- /**
- * Reads len bytes from the stream into the buffer.
- * @param buf the buffer to read into
- * @param len the number of bytes to read
- * @throws Error if there are problems reading
- */
- virtual void read(void *buf, size_t len) = 0;
- virtual ~InStream() {}
- };
-
- /**
- * An interface for an output stream.
- */
- class OutStream {
- public:
- /**
- * Write the given buffer to the stream.
- * @param buf the data to write
- * @param len the number of bytes to write
- * @throws Error if there are problems writing
- */
- virtual void write(const void *buf, size_t len) = 0;
- /**
- * Flush the data to the underlying store.
- */
- virtual void flush() = 0;
- virtual ~OutStream() {}
- };
-
- /**
- * A class to read a file as a stream.
- */
- class FileInStream : public InStream {
- public:
- FileInStream();
- bool open(const std::string& name);
- bool open(FILE* file);
- void read(void *buf, size_t buflen);
- bool skip(size_t nbytes);
- bool close();
- virtual ~FileInStream();
- private:
- /**
- * The file to read from.
- */
- FILE *mFile;
- /**
- * Is this class responsible for closing the FILE*?
- */
- bool isOwned;
- };
-
- /**
- * A class to write a stream to a file.
- */
- class FileOutStream: public OutStream {
- public:
-
- /**
- * Create a stream that isn't bound to anything.
- */
- FileOutStream();
-
- /**
- * Create the given file, potentially overwriting an existing file.
- */
- bool open(const std::string& name, bool overwrite);
- bool open(FILE* file);
- void write(const void* buf, size_t len);
- bool advance(size_t nbytes);
- void flush();
- bool close();
- virtual ~FileOutStream();
- private:
- FILE *mFile;
- bool isOwned;
- };
-
- /**
- * A stream that reads from a string.
- */
- class StringInStream: public InStream {
- public:
- StringInStream(const std::string& str);
- virtual void read(void *buf, size_t buflen);
- private:
- const std::string& buffer;
- std::string::const_iterator itr;
- };
-
- void serializeInt(int32_t t, OutStream& stream);
- int32_t deserializeInt(InStream& stream);
- void serializeLong(int64_t t, OutStream& stream);
- int64_t deserializeLong(InStream& stream);
- void serializeFloat(float t, OutStream& stream);
- float deserializeFloat(InStream& stream);
- void serializeString(const std::string& t, OutStream& stream);
- void deserializeString(std::string& t, InStream& stream);
-}
-
-#endif
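The stream classes and serialize*/deserialize* helpers above pair up for simple round-trips, with HADOOP_ASSERT raising HadoopUtils::Error on failure. A minimal sketch, assuming only a writable working directory (the file name pairs.bin is arbitrary):

    #include <iostream>
    #include <stdint.h>
    #include <string>
    #include "hadoop/SerialUtils.hh"

    int main() {
      try {
        // Write an int and a string using the declared helpers.
        HadoopUtils::FileOutStream out;
        HADOOP_ASSERT(out.open("pairs.bin", true), "cannot create pairs.bin");
        HadoopUtils::serializeInt(42, out);
        HadoopUtils::serializeString("hello", out);
        out.flush();
        out.close();

        // Read the values back in the same order they were written.
        HadoopUtils::FileInStream in;
        HADOOP_ASSERT(in.open("pairs.bin"), "cannot open pairs.bin");
        int32_t n = HadoopUtils::deserializeInt(in);
        std::string s;
        HadoopUtils::deserializeString(s, in);
        in.close();
        HADOOP_ASSERT(n == 42 && s == "hello", "round trip failed");
      } catch (const HadoopUtils::Error& e) {
        std::cerr << e.getMessage() << std::endl;
        return 1;
      }
      return 0;
    }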
diff --git a/lib/hadoop-0.20.0/c++/Linux-amd64-64/include/hadoop/StringUtils.hh b/lib/hadoop-0.20.0/c++/Linux-amd64-64/include/hadoop/StringUtils.hh
deleted file mode 100644
index 4720172725..0000000000
--- a/lib/hadoop-0.20.0/c++/Linux-amd64-64/include/hadoop/StringUtils.hh
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef HADOOP_STRING_UTILS_HH
-#define HADOOP_STRING_UTILS_HH
-
-#include <stdint.h>
-#include <string>
-#include <vector>
-
-namespace HadoopUtils {
-
- /**
- * Convert an integer to a string.
- */
- std::string toString(int32_t x);
-
- /**
- * Convert a string to an integer.
- * @throws Error if the string is not a valid integer
- */
- int32_t toInt(const std::string& val);
-
- /**
- * Convert the string to a float.
- * @throws Error if the string is not a valid float
- */
- float toFloat(const std::string& val);
-
- /**
- * Convert the string to a boolean.
- * @throws Error if the string is not a valid boolean value
- */
- bool toBool(const std::string& val);
-
- /**
- * Get the current time in the number of milliseconds since 1970.
- */
- uint64_t getCurrentMillis();
-
- /**
- * Split a string into "words". Multiple delimiters are treated as a single
- * word break, so no zero-length words are returned.
- * @param str the string to split
- * @param separator a list of characters that divide words
- */
- std::vector<std::string> splitString(const std::string& str,
- const char* separator);
-
- /**
- * Quote a string to avoid "\", non-printable characters, and the
- * delimiters.
- * @param str the string to quote
- * @param deliminators the set of characters to always quote
- */
- std::string quoteString(const std::string& str,
- const char* deliminators);
-
- /**
- * Unquote the given string to return the original string.
- * @param str the string to unquote
- */
- std::string unquoteString(const std::string& str);
-
-}
-
-#endif
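A short sketch of the conversion and splitting helpers declared above; note that runs of separator characters yield no empty words:

    #include <iostream>
    #include <stdint.h>
    #include <string>
    #include <vector>
    #include "hadoop/StringUtils.hh"

    int main() {
      // toInt() throws HadoopUtils::Error on malformed input.
      int32_t n = HadoopUtils::toInt("123");
      std::string s = HadoopUtils::toString(n + 1);  // "124"

      // "a,,b, c" splits to exactly {"a", "b", "c"}: no zero-length words.
      std::vector<std::string> words =
          HadoopUtils::splitString("a,,b, c", ", ");
      for (size_t i = 0; i < words.size(); ++i) {
        std::cout << words[i] << std::endl;
      }
      std::cout << s << std::endl;
      return 0;
    }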
diff --git a/lib/hadoop-0.20.0/c++/Linux-amd64-64/include/hadoop/TemplateFactory.hh b/lib/hadoop-0.20.0/c++/Linux-amd64-64/include/hadoop/TemplateFactory.hh
deleted file mode 100644
index 22e10ae56f..0000000000
--- a/lib/hadoop-0.20.0/c++/Linux-amd64-64/include/hadoop/TemplateFactory.hh
+++ /dev/null
@@ -1,96 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef HADOOP_PIPES_TEMPLATE_FACTORY_HH
-#define HADOOP_PIPES_TEMPLATE_FACTORY_HH
-
-namespace HadoopPipes {
-
- template <class mapper, class reducer>
- class TemplateFactory2: public Factory {
- public:
- Mapper* createMapper(MapContext& context) const {
- return new mapper(context);
- }
- Reducer* createReducer(ReduceContext& context) const {
- return new reducer(context);
- }
- };
-
- template <class mapper, class reducer, class partitioner>
- class TemplateFactory3: public TemplateFactory2<mapper,reducer> {
- public:
- Partitioner* createPartitioner(MapContext& context) const {
- return new partitioner(context);
- }
- };
-
- template <class mapper, class reducer>
- class TemplateFactory3<mapper, reducer, void>
- : public TemplateFactory2<mapper,reducer> {
- };
-
- template <class mapper, class reducer, class partitioner, class combiner>
- class TemplateFactory4
- : public TemplateFactory3<mapper,reducer,partitioner>{
- public:
- Reducer* createCombiner(MapContext& context) const {
- return new combiner(context);
- }
- };
-
- template <class mapper, class reducer, class partitioner>
- class TemplateFactory4<mapper,reducer,partitioner,void>
- : public TemplateFactory3<mapper,reducer,partitioner>{
- };
-
- template <class mapper, class reducer, class partitioner,
- class combiner, class recordReader>
- class TemplateFactory5
- : public TemplateFactory4<mapper,reducer,partitioner,combiner>{
- public:
- RecordReader* createRecordReader(MapContext& context) const {
- return new recordReader(context);
- }
- };
-
- template <class mapper, class reducer, class partitioner,class combiner>
- class TemplateFactory5<mapper,reducer,partitioner,combiner,void>
- : public TemplateFactory4<mapper,reducer,partitioner,combiner>{
- };
-
- template <class mapper, class reducer, class partitioner=void,
- class combiner=void, class recordReader=void,
- class recordWriter=void>
- class TemplateFactory
- : public TemplateFactory5<mapper,reducer,partitioner,combiner,recordReader>{
- public:
- RecordWriter* createRecordWriter(ReduceContext& context) const {
- return new recordWriter(context);
- }
- };
-
- template <class mapper, class reducer, class partitioner,
- class combiner, class recordReader>
- class TemplateFactory<mapper, reducer, partitioner, combiner, recordReader,
- void>
- : public TemplateFactory5<mapper,reducer,partitioner,combiner,recordReader>{
- };
-
-}
-
-#endif
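The chain of partial specializations lets an application name only the components it customizes: every slot left as void falls through to the Factory base-class default, which returns NULL so the framework supplies its own implementation. A sketch of the two common shapes, with hypothetical MyMap, MyReduce, and MyPartitioner classes:

    // Hypothetical application classes; Pipes.hh must precede TemplateFactory.hh.
    #include <string>
    #include "hadoop/Pipes.hh"
    #include "hadoop/TemplateFactory.hh"

    class MyMap : public HadoopPipes::Mapper {
    public:
      MyMap(HadoopPipes::MapContext&) {}
      void map(HadoopPipes::MapContext& ctx) {
        ctx.emit(ctx.getInputKey(), ctx.getInputValue());  // identity map
      }
    };

    class MyReduce : public HadoopPipes::Reducer {
    public:
      MyReduce(HadoopPipes::ReduceContext&) {}
      void reduce(HadoopPipes::ReduceContext& ctx) {
        while (ctx.nextValue()) {
          ctx.emit(ctx.getInputKey(), ctx.getInputValue());  // identity reduce
        }
      }
    };

    class MyPartitioner : public HadoopPipes::Partitioner {
    public:
      MyPartitioner(HadoopPipes::MapContext&) {}
      int partition(const std::string& key, int numOfReduces) {
        // Route by the first byte of the key.
        return key.empty()
            ? 0 : static_cast<unsigned char>(key[0]) % numOfReduces;
      }
    };

    // Mapper and reducer only: partitioner/combiner/reader/writer stay NULL.
    HadoopPipes::TemplateFactory<MyMap, MyReduce> basicFactory;

    // Adding a partitioner overrides just createPartitioner().
    HadoopPipes::TemplateFactory<MyMap, MyReduce, MyPartitioner> customFactory;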
diff --git a/lib/hadoop-0.20.0/c++/Linux-amd64-64/lib/libhadooppipes.a b/lib/hadoop-0.20.0/c++/Linux-amd64-64/lib/libhadooppipes.a
deleted file mode 100644
index be303140cb..0000000000
--- a/lib/hadoop-0.20.0/c++/Linux-amd64-64/lib/libhadooppipes.a
+++ /dev/null
Binary files differ
diff --git a/lib/hadoop-0.20.0/c++/Linux-amd64-64/lib/libhadooputils.a b/lib/hadoop-0.20.0/c++/Linux-amd64-64/lib/libhadooputils.a
deleted file mode 100644
index 8a0aded98e..0000000000
--- a/lib/hadoop-0.20.0/c++/Linux-amd64-64/lib/libhadooputils.a
+++ /dev/null
Binary files differ
diff --git a/lib/hadoop-0.20.0/c++/Linux-i386-32/include/hadoop/Pipes.hh b/lib/hadoop-0.20.0/c++/Linux-i386-32/include/hadoop/Pipes.hh
deleted file mode 100644
index 9a785d966a..0000000000
--- a/lib/hadoop-0.20.0/c++/Linux-i386-32/include/hadoop/Pipes.hh
+++ /dev/null
@@ -1,258 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef HADOOP_PIPES_HH
-#define HADOOP_PIPES_HH
-
-#ifdef SWIG
-%module (directors="1") HadoopPipes
-%include "std_string.i"
-%feature("director") Mapper;
-%feature("director") Reducer;
-%feature("director") Partitioner;
-%feature("director") RecordReader;
-%feature("director") RecordWriter;
-%feature("director") Factory;
-#else
-#include <string>
-#endif
-
-namespace HadoopPipes {
-
-/**
- * This defines the interface between application code and the C++
- * (foreign-code) side of Hadoop Map/Reduce.
- */
-
-/**
- * A JobConf defines the properties for a job.
- */
-class JobConf {
-public:
- virtual bool hasKey(const std::string& key) const = 0;
- virtual const std::string& get(const std::string& key) const = 0;
- virtual int getInt(const std::string& key) const = 0;
- virtual float getFloat(const std::string& key) const = 0;
- virtual bool getBoolean(const std::string& key) const = 0;
- virtual ~JobConf() {}
-};
-
-/**
- * Task context provides the information about the task and job.
- */
-class TaskContext {
-public:
- /**
- * Counter to keep track of a property and its value.
- */
- class Counter {
- private:
- int id;
- public:
- Counter(int counterId) : id(counterId) {}
- Counter(const Counter& counter) : id(counter.id) {}
-
- int getId() const { return id; }
- };
-
- /**
- * Get the JobConf for the current task.
- */
- virtual const JobConf* getJobConf() = 0;
-
- /**
- * Get the current key.
- * @return the current key
- */
- virtual const std::string& getInputKey() = 0;
-
- /**
- * Get the current value.
- * @return the current value
- */
- virtual const std::string& getInputValue() = 0;
-
- /**
- * Generate an output record
- */
- virtual void emit(const std::string& key, const std::string& value) = 0;
-
- /**
- * Mark your task as having made progress without changing the status
- * message.
- */
- virtual void progress() = 0;
-
- /**
- * Set the status message and call progress.
- */
- virtual void setStatus(const std::string& status) = 0;
-
- /**
- * Register a counter with the given group and name.
- */
- virtual Counter*
- getCounter(const std::string& group, const std::string& name) = 0;
-
- /**
- * Increment the value of the counter by the given amount.
- */
- virtual void incrementCounter(const Counter* counter, uint64_t amount) = 0;
-
- virtual ~TaskContext() {}
-};
-
-class MapContext: public TaskContext {
-public:
-
- /**
- * Access the InputSplit of the mapper.
- */
- virtual const std::string& getInputSplit() = 0;
-
- /**
- * Get the name of the key class of the input to this task.
- */
- virtual const std::string& getInputKeyClass() = 0;
-
- /**
- * Get the name of the value class of the input to this task.
- */
- virtual const std::string& getInputValueClass() = 0;
-
-};
-
-class ReduceContext: public TaskContext {
-public:
- /**
- * Advance to the next value.
- */
- virtual bool nextValue() = 0;
-};
-
-class Closable {
-public:
- virtual void close() {}
- virtual ~Closable() {}
-};
-
-/**
- * The application's mapper class to do map.
- */
-class Mapper: public Closable {
-public:
- virtual void map(MapContext& context) = 0;
-};
-
-/**
- * The application's reducer class to do reduce.
- */
-class Reducer: public Closable {
-public:
- virtual void reduce(ReduceContext& context) = 0;
-};
-
-/**
- * User code to decide where each key should be sent.
- */
-class Partitioner {
-public:
- virtual int partition(const std::string& key, int numOfReduces) = 0;
- virtual ~Partitioner() {}
-};
-
-/**
- * Applications that want to read the input for the map function directly
- * can define RecordReaders in C++.
- */
-class RecordReader: public Closable {
-public:
- virtual bool next(std::string& key, std::string& value) = 0;
-
- /**
- * The progress of the record reader through the split as a value between
- * 0.0 and 1.0.
- */
- virtual float getProgress() = 0;
-};
-
-/**
- * An object to write key/value pairs as they are emitted from the reduce.
- */
-class RecordWriter: public Closable {
-public:
- virtual void emit(const std::string& key,
- const std::string& value) = 0;
-};
-
-/**
- * A factory to create the necessary application objects.
- */
-class Factory {
-public:
- virtual Mapper* createMapper(MapContext& context) const = 0;
- virtual Reducer* createReducer(ReduceContext& context) const = 0;
-
- /**
- * Create a combiner, if this application has one.
- * @return the new combiner or NULL, if one is not needed
- */
- virtual Reducer* createCombiner(MapContext& context) const {
- return NULL;
- }
-
- /**
- * Create an application partitioner object.
- * @return the new partitioner or NULL, if the default partitioner should be
- * used.
- */
- virtual Partitioner* createPartitioner(MapContext& context) const {
- return NULL;
- }
-
- /**
- * Create an application record reader.
- * @return the new RecordReader or NULL, if the Java RecordReader should be
- * used.
- */
- virtual RecordReader* createRecordReader(MapContext& context) const {
- return NULL;
- }
-
- /**
- * Create an application record writer.
- * @return the new RecordWriter or NULL, if the Java RecordWriter should be
- * used.
- */
- virtual RecordWriter* createRecordWriter(ReduceContext& context) const {
- return NULL;
- }
-
- virtual ~Factory() {}
-};
-
-/**
- * Run the assigned task in the framework.
- * The user's main function should construct a Factory (for example a
- * TemplateFactory over its Mapper and Reducer classes) and pass it here.
- * @return true, if the task succeeded.
- */
-bool runTask(const Factory& factory);
-
-}
-
-#endif
diff --git a/lib/hadoop-0.20.0/c++/Linux-i386-32/include/hadoop/SerialUtils.hh b/lib/hadoop-0.20.0/c++/Linux-i386-32/include/hadoop/SerialUtils.hh
deleted file mode 100644
index 16cbab65b2..0000000000
--- a/lib/hadoop-0.20.0/c++/Linux-i386-32/include/hadoop/SerialUtils.hh
+++ /dev/null
@@ -1,169 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef HADOOP_SERIAL_UTILS_HH
-#define HADOOP_SERIAL_UTILS_HH
-
-#include <string>
-
-namespace HadoopUtils {
-
- /**
- * A simple exception class that records a message for the user.
- */
- class Error {
- private:
- std::string error;
- public:
-
- /**
- * Create an error object with the given message.
- */
- Error(const std::string& msg);
-
- /**
- * Construct an error object with the given message that was created on
- * the given file, line, and function.
- */
- Error(const std::string& msg,
- const std::string& file, int line, const std::string& function);
-
- /**
- * Get the error message.
- */
- const std::string& getMessage() const;
- };
-
- /**
- * Check to make sure that the condition is true, and throw an exception
- * if it is not. The exception will contain the message and a description
- * of the source location.
- */
- #define HADOOP_ASSERT(CONDITION, MESSAGE) \
- { \
- if (!(CONDITION)) { \
- throw HadoopUtils::Error((MESSAGE), __FILE__, __LINE__, \
- __PRETTY_FUNCTION__); \
- } \
- }
-
- /**
- * An interface for an input stream.
- */
- class InStream {
- public:
- /**
- * Reads len bytes from the stream into the buffer.
- * @param buf the buffer to read into
- * @param len the number of bytes to read
- * @throws Error if there are problems reading
- */
- virtual void read(void *buf, size_t len) = 0;
- virtual ~InStream() {}
- };
-
- /**
- * An interface for an output stream.
- */
- class OutStream {
- public:
- /**
- * Write the given buffer to the stream.
- * @param buf the data to write
- * @param len the number of bytes to write
- * @throws Error if there are problems writing
- */
- virtual void write(const void *buf, size_t len) = 0;
- /**
- * Flush the data to the underlying store.
- */
- virtual void flush() = 0;
- virtual ~OutStream() {}
- };
-
- /**
- * A class to read a file as a stream.
- */
- class FileInStream : public InStream {
- public:
- FileInStream();
- bool open(const std::string& name);
- bool open(FILE* file);
- void read(void *buf, size_t buflen);
- bool skip(size_t nbytes);
- bool close();
- virtual ~FileInStream();
- private:
- /**
- * The file to read from.
- */
- FILE *mFile;
- /**
- * Is this class responsible for closing the FILE*?
- */
- bool isOwned;
- };
-
- /**
- * A class to write a stream to a file.
- */
- class FileOutStream: public OutStream {
- public:
-
- /**
- * Create a stream that isn't bound to anything.
- */
- FileOutStream();
-
- /**
- * Create the given file, potentially overwriting an existing file.
- */
- bool open(const std::string& name, bool overwrite);
- bool open(FILE* file);
- void write(const void* buf, size_t len);
- bool advance(size_t nbytes);
- void flush();
- bool close();
- virtual ~FileOutStream();
- private:
- FILE *mFile;
- bool isOwned;
- };
-
- /**
- * A stream that reads from a string.
- */
- class StringInStream: public InStream {
- public:
- StringInStream(const std::string& str);
- virtual void read(void *buf, size_t buflen);
- private:
- const std::string& buffer;
- std::string::const_iterator itr;
- };
-
- void serializeInt(int32_t t, OutStream& stream);
- int32_t deserializeInt(InStream& stream);
- void serializeLong(int64_t t, OutStream& stream);
- int64_t deserializeLong(InStream& stream);
- void serializeFloat(float t, OutStream& stream);
- float deserializeFloat(InStream& stream);
- void serializeString(const std::string& t, OutStream& stream);
- void deserializeString(std::string& t, InStream& stream);
-}
-
-#endif
diff --git a/lib/hadoop-0.20.0/c++/Linux-i386-32/include/hadoop/StringUtils.hh b/lib/hadoop-0.20.0/c++/Linux-i386-32/include/hadoop/StringUtils.hh
deleted file mode 100644
index 4720172725..0000000000
--- a/lib/hadoop-0.20.0/c++/Linux-i386-32/include/hadoop/StringUtils.hh
+++ /dev/null
@@ -1,81 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef HADOOP_STRING_UTILS_HH
-#define HADOOP_STRING_UTILS_HH
-
-#include <stdint.h>
-#include <string>
-#include <vector>
-
-namespace HadoopUtils {
-
- /**
- * Convert an integer to a string.
- */
- std::string toString(int32_t x);
-
- /**
- * Convert a string to an integer.
- * @throws Error if the string is not a valid integer
- */
- int32_t toInt(const std::string& val);
-
- /**
- * Convert the string to a float.
- * @throws Error if the string is not a valid float
- */
- float toFloat(const std::string& val);
-
- /**
- * Convert the string to a boolean.
- * @throws Error if the string is not a valid boolean value
- */
- bool toBool(const std::string& val);
-
- /**
- * Get the current time in the number of milliseconds since 1970.
- */
- uint64_t getCurrentMillis();
-
- /**
- * Split a string into "words". Multiple delimiters are treated as a single
- * word break, so no zero-length words are returned.
- * @param str the string to split
- * @param separator a list of characters that divide words
- */
- std::vector<std::string> splitString(const std::string& str,
- const char* separator);
-
- /**
- * Quote a string to avoid "\", non-printable characters, and the
- * delimiters.
- * @param str the string to quote
- * @param deliminators the set of characters to always quote
- */
- std::string quoteString(const std::string& str,
- const char* deliminators);
-
- /**
- * Unquote the given string to return the original string.
- * @param str the string to unquote
- */
- std::string unquoteString(const std::string& str);
-
-}
-
-#endif
diff --git a/lib/hadoop-0.20.0/c++/Linux-i386-32/include/hadoop/TemplateFactory.hh b/lib/hadoop-0.20.0/c++/Linux-i386-32/include/hadoop/TemplateFactory.hh
deleted file mode 100644
index 22e10ae56f..0000000000
--- a/lib/hadoop-0.20.0/c++/Linux-i386-32/include/hadoop/TemplateFactory.hh
+++ /dev/null
@@ -1,96 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef HADOOP_PIPES_TEMPLATE_FACTORY_HH
-#define HADOOP_PIPES_TEMPLATE_FACTORY_HH
-
-namespace HadoopPipes {
-
- template <class mapper, class reducer>
- class TemplateFactory2: public Factory {
- public:
- Mapper* createMapper(MapContext& context) const {
- return new mapper(context);
- }
- Reducer* createReducer(ReduceContext& context) const {
- return new reducer(context);
- }
- };
-
- template <class mapper, class reducer, class partitioner>
- class TemplateFactory3: public TemplateFactory2<mapper,reducer> {
- public:
- Partitioner* createPartitioner(MapContext& context) const {
- return new partitioner(context);
- }
- };
-
- template <class mapper, class reducer>
- class TemplateFactory3<mapper, reducer, void>
- : public TemplateFactory2<mapper,reducer> {
- };
-
- template <class mapper, class reducer, class partitioner, class combiner>
- class TemplateFactory4
- : public TemplateFactory3<mapper,reducer,partitioner>{
- public:
- Reducer* createCombiner(MapContext& context) const {
- return new combiner(context);
- }
- };
-
- template <class mapper, class reducer, class partitioner>
- class TemplateFactory4<mapper,reducer,partitioner,void>
- : public TemplateFactory3<mapper,reducer,partitioner>{
- };
-
- template <class mapper, class reducer, class partitioner,
- class combiner, class recordReader>
- class TemplateFactory5
- : public TemplateFactory4<mapper,reducer,partitioner,combiner>{
- public:
- RecordReader* createRecordReader(MapContext& context) const {
- return new recordReader(context);
- }
- };
-
- template <class mapper, class reducer, class partitioner,class combiner>
- class TemplateFactory5<mapper,reducer,partitioner,combiner,void>
- : public TemplateFactory4<mapper,reducer,partitioner,combiner>{
- };
-
- template <class mapper, class reducer, class partitioner=void,
- class combiner=void, class recordReader=void,
- class recordWriter=void>
- class TemplateFactory
- : public TemplateFactory5<mapper,reducer,partitioner,combiner,recordReader>{
- public:
- RecordWriter* createRecordWriter(ReduceContext& context) const {
- return new recordWriter(context);
- }
- };
-
- template <class mapper, class reducer, class partitioner,
- class combiner, class recordReader>
- class TemplateFactory<mapper, reducer, partitioner, combiner, recordReader,
- void>
- : public TemplateFactory5<mapper,reducer,partitioner,combiner,recordReader>{
- };
-
-}
-
-#endif
diff --git a/lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhadooppipes.a b/lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhadooppipes.a
deleted file mode 100644
index 73debcec92..0000000000
--- a/lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhadooppipes.a
+++ /dev/null
Binary files differ
diff --git a/lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhadooputils.a b/lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhadooputils.a
deleted file mode 100644
index 6753169b8a..0000000000
--- a/lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhadooputils.a
+++ /dev/null
Binary files differ
diff --git a/lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhdfs.la b/lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhdfs.la
deleted file mode 100644
index b6ce94229f..0000000000
--- a/lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhdfs.la
+++ /dev/null
@@ -1,41 +0,0 @@
-# libhdfs.la - a libtool library file
-# Generated by ltmain.sh (GNU libtool) 2.2
-#
-# Please DO NOT delete this file!
-# It is necessary for linking the library.
-
-# The name that we can dlopen(3).
-dlname='libhdfs.so.0'
-
-# Names of this library.
-library_names='libhdfs.so.0.0.0 libhdfs.so.0 libhdfs.so'
-
-# The name of the static archive.
-old_library=''
-
-# Linker flags that can not go in dependency_libs.
-inherited_linker_flags=''
-
-# Libraries that this one depends upon.
-dependency_libs=' -L/home/hadoopqa/tools/java/latest1.6-32/jre/lib/i386/server -ljvm -ldl -lpthread'
-
-# Names of additional weak libraries provided by this library
-weak_library_names=''
-
-# Version information for libhdfs.
-current=0
-age=0
-revision=0
-
-# Is this an already installed library?
-installed=yes
-
-# Should we warn about portability when linking against -modules?
-shouldnotlink=no
-
-# Files to dlopen/dlpreopen
-dlopen=''
-dlpreopen=''
-
-# Directory that this library needs to be installed in:
-libdir='/home/ndaley/hadoop/branch-0.20/build/c++/Linux-i386-32/lib'
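The .la metadata above records the runtime name (libhdfs.so.0) and the link-time dependencies (-ljvm -ldl -lpthread, with a 32-bit JRE's server directory on the library path). For context, a client of the removed library looked roughly like the sketch below; hdfs.h is not part of this diff — it ships elsewhere in the same Hadoop release — so the calls shown are assumed from the libhdfs C API of that era.

    // Assumed libhdfs usage; hdfs.h comes from the Hadoop distribution,
    // not from the files in this diff.
    #include <fcntl.h>
    #include <string.h>
    #include "hdfs.h"

    int main() {
      // "default" resolves fs.default.name from the loaded configuration.
      hdfsFS fs = hdfsConnect("default", 0);
      if (!fs) return 1;

      // Create (or truncate) a file and write a line to it.
      hdfsFile out = hdfsOpenFile(fs, "/tmp/libhdfs-demo.txt",
                                  O_WRONLY | O_CREAT, 0, 0, 0);
      if (!out) { hdfsDisconnect(fs); return 1; }
      const char* msg = "hello from libhdfs\n";
      hdfsWrite(fs, out, msg, strlen(msg));
      hdfsFlush(fs, out);
      hdfsCloseFile(fs, out);
      hdfsDisconnect(fs);
      return 0;
    }

The link line mirrors dependency_libs above: roughly g++ demo.cc -lhdfs -ljvm -ldl -lpthread, with -L flags for the lib directory above and the JRE's i386/server directory.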
diff --git a/lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhdfs.so b/lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhdfs.so
deleted file mode 100644
index 358d582d43..0000000000
--- a/lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhdfs.so
+++ /dev/null
Binary files differ
diff --git a/lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhdfs.so.0 b/lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhdfs.so.0
deleted file mode 100644
index 358d582d43..0000000000
--- a/lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhdfs.so.0
+++ /dev/null
Binary files differ
diff --git a/lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhdfs.so.0.0.0 b/lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhdfs.so.0.0.0
deleted file mode 100644
index 358d582d43..0000000000
--- a/lib/hadoop-0.20.0/c++/Linux-i386-32/lib/libhdfs.so.0.0.0
+++ /dev/null
Binary files differ