/etc/profile or /etc/environment
#
# Check which shell is reading this file
#
# Set Hadoop-related environment variables
export HADOOP_HOME='/opt/hadoop-1.2.0'
# Set JAVA_HOME (we will also configure JAVA_HOME directly for Hadoop later on)
export JAVA_HOME='/usr/lib64/jvm/jre-1.7.0-openjdk'
# Add Hadoop bin/ directory to PATH
export PATH=$PATH:$HADOOP_HOME/bin
### Komodo Program
export KOMODO_HOME='/opt/komodo'
export PATH=$PATH:$KOMODO_HOME/bin
## PIG Setting ###
export PIG_HOME='/opt/pig-0.12.1'
export PATH=$PATH:$PIG_HOME/bin
## HIVE Setting ###
export HIVE_HOME='/opt/hive-0.11.0-bin'
export PATH=$PATH:$HIVE_HOME/bin
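The exports above only take effect for new login shells. A minimal sketch to apply and verify them in the current session, assuming the lines were added to /etc/profile as shown:

# Reload the profile in the current shell (or log out and back in)
$ source /etc/profile

# Verify the variables are set and the tools are found on PATH
$ echo $HADOOP_HOME $JAVA_HOME
$ which hadoop pig hive
$ hadoop version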
Friday, April 4, 2014
SSH: Error: cannot open display: :0
$ xhost +
access control disabled, clients can connect from any host
$ ssh username@hostname -X   ## untrusted X11 forwarding
$ ssh username@hostname -Y   ## trusted X11 forwarding
$ export DISPLAY=:0.0
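A quick way to confirm that forwarding actually works, assuming X11Forwarding is enabled in the server's sshd_config and a simple X client such as xclock exists on the remote host (both are assumptions, not stated above):

# Connect with trusted X11 forwarding; ssh sets DISPLAY on the remote side by itself
$ ssh -Y username@hostname

# On the remote host the forwarded display should be set automatically
$ echo $DISPLAY        # typically something like localhost:10.0

# Any X client will do as a test
$ xclock

Exporting DISPLAY=:0.0 by hand, as in the last line above, instead points clients at the server's own local screen rather than at the forwarded connection.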
Thursday, April 3, 2014
Pydoop: #include <jni.h>
-I/usr/lib/jvm/jre-1.7.0-openjdk/include/linux -Isrc/hadoop-1.2.1.patched/libhdfs -I/usr/include/python2.7 -c src/hdfs_fs.cpp -o build/temp.linux-i686-2.7/src/hdfs_fs.o
In file included from src/hdfs_common.hpp:25:0,
from src/hdfs_fs.hpp:24,
from src/hdfs_fs.cpp:21:
src/hadoop-1.2.1.patched/libhdfs/hdfs.h:33:17: fatal error: jni.h: No such file or directory
#include <jni.h>
Solution:
export JAVA_HOME="/usr/lib/jvm/java-1.7.0-openjdk-1.7.0/"
export PATH="$JAVA_HOME/bin:$PATH"
Pydoop: fatal error: openssl/hmac.h: No such file or directory
-Wall -D_FORTIFY_SOURCE=2 -funwind-tables -fasynchronous-unwind-tables -g -DOPENSSL_LOAD_CONF -fPIC -Isrc/hadoop-1.2.1.patched/pipes/api -Isrc/hadoop-1.2.1.patched/utils/api -I/usr/include/python2.7 -c src/hadoop-1.2.1.patched/pipes/impl/HadoopPipes.cc -o build/temp.linux-i686-2.7/src/hadoop-1.2.1.patched/pipes/impl/HadoopPipes.o
src/hadoop-1.2.1.patched/pipes/impl/HadoopPipes.cc:39:26: fatal error: openssl/hmac.h: No such file or directory
#include <openssl/hmac.h>
Solution:
zypper install libopenssl-devel
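A minimal sketch of the fix on openSUSE, assuming the zypper repositories are reachable; the ls line is just a sanity check, and python setup.py build is the usual distutils invocation for rebuilding Pydoop:

# Install the OpenSSL development headers (as root or via sudo)
$ zypper install libopenssl-devel

# Confirm the missing header is now present, then re-run the build
$ ls /usr/include/openssl/hmac.h
$ python setup.py build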
Wednesday, April 2, 2014
$HADOOP_HOME is deprecated
$HADOOP_HOME is deprecated
Solution:
export HADOOP_HOME_WARN_SUPPRESS="TRUE"
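The Hadoop 1.x scripts print this warning whenever HADOOP_HOME is set in the environment. A minimal sketch of where to put the suppression so it sticks; the file locations are a suggestion, not prescribed above:

# Put the suppression next to the other exports, e.g. in /etc/profile or ~/.bashrc
export HADOOP_HOME_WARN_SUPPRESS="TRUE"

# Reload and confirm the warning is gone
$ source /etc/profile
$ hadoop version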
Pydoop : gcc: error trying to exec 'cc1plus': execvp: No such file or directory
...
gcc -pthread -fno-strict-aliasing -fomit-frame-pointer -fmessage-length=0 -grecord-gcc-switches -fstack-protector -O2 -Wall -D_FORTIFY_SOURCE=2 -funwind-tables -fasynchronous-unwind-tables -g -DNDEBUG -fomit-frame-pointer -fmessage-length=0 -grecord-gcc-switches -fstack-protector -O2 -Wall -D_FORTIFY_SOURCE=2 -funwind-tables -fasynchronous-unwind-tables -g -DOPENSSL_LOAD_CONF -fPIC -Isrc/hadoop-1.2.1.patched/pipes/api -Isrc/hadoop-1.2.1.patched/utils/api -I/usr/include/python2.7 -c src/pipes.cpp -o build/temp.linux-i686-2.7/src/pipes.o
gcc: error trying to exec 'cc1plus': execvp: No such file or directory
error: command 'gcc' failed with exit status 1
Solution:
zypper install gcc-c++
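cc1plus is the C++ compiler proper, so this error means only the C front end of GCC is installed. A minimal sketch of the fix and a quick check, assuming zypper can reach its repositories and that python setup.py build is the build command being re-run:

# Install the C++ compiler (as root or via sudo)
$ zypper install gcc-c++

# cc1plus is invoked internally by g++, so checking g++ is enough
$ g++ --version

# Then re-run the failed build
$ python setup.py build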
Tuesday, April 1, 2014
Hadoop Streaming Execution
bin/hadoop jar contrib/streaming/hadoop-*streaming*.jar -mapper map.py -reducer red.py -input /user/Hery/data -output /user/Hery/out-data
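Because streaming only feeds lines on stdin and reads lines from stdout, the mapper and reducer can be tested locally with an ordinary shell pipeline before submitting the job. A minimal sketch, where sample.txt is a hypothetical local copy of the input and map.py/red.py are the executable scripts from the command above:

# Simulate the streaming job locally: map, shuffle (sort), reduce
$ cat sample.txt | ./map.py | sort | ./red.py

# If the scripts only exist on the client machine, ship them with the job using -file
$ bin/hadoop jar contrib/streaming/hadoop-*streaming*.jar \
    -file map.py -mapper map.py \
    -file red.py -reducer red.py \
    -input /user/Hery/data -output /user/Hery/out-data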
Hadoop Py: #include "Python.h"
creating build/temp.linux-i686-2.7/hadoopy
gcc -pthread -fno-strict-aliasing -fomit-frame-pointer -fmessage-length=0 -grecord-gcc-switches -fstack-protector -O2 -Wall -D_FORTIFY_SOURCE=2 -funwind-tables -fasynchronous-unwind-tables -g -DNDEBUG -fomit-frame-pointer -fmessage-length=0 -grecord-gcc-switches -fstack-protector -O2 -Wall -D_FORTIFY_SOURCE=2 -funwind-tables -fasynchronous-unwind-tables -g -DOPENSSL_LOAD_CONF -fPIC -I/usr/include/python2.7 -c hadoopy/_main.c -o build/temp.linux-i686-2.7/hadoopy/_main.o
hadoopy/_main.c:4:20: fatal error: Python.h: No such file or directory
#include "Python.h"
^
compilation terminated.
error: command 'gcc' failed with exit status 1
zypper install python-devel
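Python.h comes from the Python development package, which openSUSE does not install by default. A minimal sketch of the fix and a check, assuming Python 2.7 as in the build output; python setup.py install is the usual way to rebuild hadoopy afterwards:

# Install the Python C headers (as root or via sudo)
$ zypper install python-devel

# The failing include should now resolve
$ ls /usr/include/python2.7/Python.h

# Re-run the hadoopy build/installation
$ python setup.py install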
Tuesday, February 25, 2014
Canon IP 1880 on openSUSE
IJ Printer Driver Ver. 2.70 for Linux (rpm Common package)
http://support-asia.canon-asia.com/contents/ASIA/EN/0900718405.html
IJ Printer Driver Ver. 2.70 for Linux (rpm Package for iP1800 series)
http://support-asia.canon-asia.com/contents/ASIA/EN/0900718601.html
IJ Printer Driver Ver. 2.70 for Linux (Source file)
http://support-asia.canon-asia.com/contents/ASIA/EN/0900718505.html
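A minimal installation sketch, assuming the downloaded archives contain rpm packages named along the lines of cnijfilter-common and cnijfilter-ip1800series (the exact file names are an assumption; check what was actually downloaded):

# Install the common filter package first, then the iP1800-series package
# (file names assumed; adjust to the downloaded versions)
$ rpm -ivh cnijfilter-common-*.rpm
$ rpm -ivh cnijfilter-ip1800series-*.rpm

# Afterwards add the printer through CUPS (http://localhost:631) or YaST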
Thursday, January 30, 2014
Some Simulator/Emulator Software That Supports the OpenFlow/SDN Concept
OpenFlow is a technology for controlling switches so that the data being sent or received can be handled according to the algorithm implemented on the OpenFlow hardware (the switch). A conventional switch basically operates only at L2 and L3, but OpenFlow allows a switch to operate at layers L2 through L7.
Implementing OpenFlow naturally requires a switch that supports the technology; as an alternative to hardware, we can use one of several simulator/emulator packages that are widely available on the internet:
--|to be continued|--
Saturday, January 25, 2014
Installing Microsoft Core Fonts on openSUSE
The fonts we commonly encounter on the Windows operating system are not installed by default on the openSUSE distribution. This is because they are proprietary fonts, and the openSUSE developers presumably do not bundle software owned exclusively by a particular party; the use of proprietary software is also subject to specific laws and licensing terms.
This can be a problem for Linux users, especially on openSUSE, when preparing articles or academic papers. The author ran into exactly this during a proposal defense: the font I used differed from the one required by the campus academic writing guidelines, which earned me plenty of criticism during the defense.
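A minimal sketch of the usual workaround on openSUSE, assuming the standard repositories are enabled; fetchmsttfonts is the openSUSE package that downloads and installs the Microsoft core fonts:

# Download and install the Microsoft core fonts (as root or via sudo)
$ zypper install fetchmsttfonts

# Refresh the font cache and check that the fonts are now visible
$ fc-cache -f
$ fc-list | grep -i "times new roman"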