import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.stream.Stream;

/**
 * Copies only the lines of {@code test.log} that look like JSON objects
 * (after trimming: start with '{' and end with '}') into {@code test.log-2}.
 *
 * <p>Demonstrates the Java 8 {@code Stream}-filter-to-file idiom without
 * materializing the whole file in memory.
 */
public class TextTest {

    /**
     * Reads {@code test.log}, keeps JSON-object-looking lines, writes them
     * (unmodified, original whitespace preserved) to {@code test.log-2}.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        Path source = Paths.get("test.log");
        Path target = Paths.get("test.log-2");
        // Files.lines holds the file open until the stream is closed,
        // so the stream must live inside try-with-resources.
        try (Stream<String> lines = Files.lines(source)) {
            // Stream::iterator adapts the one-shot stream to the
            // Iterable<String> overload of Files.write, so the lines are
            // written lazily instead of being collected into a list first.
            Iterable<String> jsonLines = lines
                    .filter(s -> s.trim().startsWith("{"))
                    .filter(s -> s.trim().endsWith("}"))
                    ::iterator;
            Files.write(target, jsonLines);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
분류 전체보기
- JAVA8 Stream Filter 를 활용한 file read to file write 2016.09.30
- Hadoop hdfs Tajo 메모 중 2016.09.20
- Spark 메모중.. 2016.08.29
- logstash apache COMMONAPACHELOG 2016.07.17
- 레드마인(redmine) 비밀번호 5회 실패 시 계정잠금.. redmine_login_audit Plugin + 수정하여.. 2016.06.20
- Redmine CentOs 에 설치 메모 2016.06.20
- Ubuntu 16.04 Desktop 초기 셋팅 2016.06.13
- Browser User-Agent 정보를 통한 브라우저 분류하기 2016.05.26
- Javascript 지도 Datamaps kor sample 2016.05.20
- sh(shell script) split 예제 2016.05.12
JAVA8 Stream Filter 를 활용한 file read to file write
Hadoop hdfs Tajo 메모 중
공부 메모 중
[환경]
- centos 6.4
- java 7
- hadoop-2.7.3
- tajo-0.11.3
[구성]
server1 : NameNode, TajoMaster
server2 : DataNode, TajoWorker (SecondaryNameNode)
server3 : DataNode, TajoWorker
server4 : DataNode, TajoWorker
# 클러스터 구성은 생략 bin/hadoop namenode -format # 초기화 시 rm -Rf /tmp/hadoop-tomcat 하고 나서 bin/start-all.sh 접속 http://192.168.0.100:50070/dfshealth.html
-- 데이터 : {"reg_dt":1474350438172,"jsessionid":"c09db86d-22c1-464b-92af-d612d7274c66","url_now":"http://ddakker.pe.kr/b","click_page_url":"http://ddakker.pe.kr/a","ip":"192.168.0.100","user_key":"1234567890","url_before":"http://ddakker.pe.kr/a","session_first_time":1474289056726,"user_agent":"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; rv:11.0) like Gecko"} bin/tsql default> \dfs -mkdir /tajo default> \dfs -ls / Found 1 items drwxr-xr-x - hadoop supergroup 0 2016-09-20 15:27 /tajo default> \dfs -put /logs/weblog /tajo/weblog default> \dfs -du -s -h /tajo/weblog 1.5 G /tajo/weblog create external table weblog ( reg_dt INT8, url_before text, jsessionid text, ip text, url_now text, click_page_url text, user_key text, session_first_time INT8) USING JSON LOCATION 'hdfs:/tajo/weblog'; default> select count(*) from weblog; Progress: 0%, response time: 0.465 sec Progress: 0%, response time: 0.467 sec Progress: 0%, response time: 0.869 sec Progress: 0%, response time: 1.67 sec Progress: 46%, response time: 2.672 sec Progress: 100%, response time: 2.735 sec ?count ------------------------------- 3503135 (1 rows, 2.735 sec, 16 B selected)
Spark 메모중..
공부 메모 중...
[환경]
- centos 6.4
- java 7
- spark-2.0.0-bin-hadoop2.7
cd jars # json format... 관련.. wget http://www.congiu.net/hive-json-serde/1.3/cdh5/json-serde-1.3-jar-with-dependencies.jar sbin/start-thriftserver.sh bin/beeline -u jdbc:hive2://localhost:10000 echo "1|abc|1.1|a" >> test.csv echo "2|def|2.3|b" >> test.csv create table if not exists testCsv (id INT, name STRING, score FLOAT, type STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '|'; load data local inpath '/usr/local/tomcat/test.csv' into table testCsv; 0: jdbc:hive2://localhost:10000> select * from testCsv; +-----+-------+--------------------+-------+--+ | id | name | score | type | +-----+-------+--------------------+-------+--+ | 1 | abc | 1.100000023841858 | a | | 2 | def | 2.299999952316284 | b | +-----+-------+--------------------+-------+--+ echo "{id: 1, name: 'abc', score: 1.1, type: 'a'}" >> test.json echo "{id: 2, name: 'def', score: 2.2, type: 'b'}" >> test.json create table if not exists testJson (id INT, name STRING, score FLOAT, type STRING) ROW FORMAT SERDE 'org.openx.data.jsonserde.JsonSerDe'; load data local inpath '/usr/local/tomcat/test.json' into table testJson; 0: jdbc:hive2://localhost:10000> select * from testJson; +-----+-------+--------------------+-------+--+ | id | name | score | type | +-----+-------+--------------------+-------+--+ | 1 | abc | 1.100000023841858 | a | | 2 | def | 2.200000047683716 | b | +-----+-------+--------------------+-------+--+
logstash apache COMMONAPACHELOG
삽질 1. timezone 부분
logstash, elasticsearch(plugis등), kibana 골치 아프니 UTC로 모두 바꾸니 편함...ㅡㅡ;
[logstash]
dd/MMM/yyyy:HH:mm:ss Z 했었는데 처리가 안되서.. 아래와 같이 했더니 해결...
-> https://discuss.elastic.co/t/how-to-set-timestamp-timezone/28401/10
[kibana]
Settings -> Advanced -> dateFormat:tz -> UTC 선택
[logconf/apache.conf]
input { stdin { } } filter { grok { patterns_dir => ["./patterns"] # 본인 로그 패턴에 따라 설정 match => { "message" => "%{COMMONAPACHELOG} %{NUMBER:responseTime} %{ALL_STR:qq1} %{ALL_STR:qq2} %{QS:referrer} %{QS:agent}" } } date { match => [ "timestamp" , "dd/MMM/yyyy:HH:mm:ss +0900" ] timezone => "UTC" } } output { stdout { codec => json } }
[logconf/apache.conf]
# 파일 단위 -> elasticsearch input { file { path => "/usr/local/tomcat/logstash/dump_logs/ssl/*" start_position => "beginning" } } filter { grok { patterns_dir => ["./patterns"] # 본인 로그 패턴에 따라 설정 match => { "message" => "%{COMMONAPACHELOG} %{ALL_STR:qq1} %{ALL_STR:qq2} %{QS:referrer} %{QS:agent}" } } date { match => [ "timestamp" , "dd/MMM/yyyy:HH:mm:ss +0900" ] timezone => "UTC" } } output { elasticsearch { hosts => ["localhost:9200"] index => "apache-access-log-%{+YYYY-MM-dd}" document_type => "web01" } #stdout { codec => rubydebug } }
[haproxy]
input { #stdin { } file { type => "searchWas" path => "/usr/local/tomcat/logstash/dump_logs/haproxy/haproxy-searchWas.log*" start_position => "beginning" } file { type => "aqmp" path => "/usr/local/tomcat/logstash/dump_logs/haproxy/haproxy-aqmp.log*" start_position => "beginning" } } filter { if [type] == "searchWas" { grok { patterns_dir => ["./patterns"] # 본인 로그 패턴에 따라 설정 match => { "message" => "%{HAPROXYHTTP}" } } date { match => [ "accept_date", "dd/MMM/YYYY:HH:mm:ss.SSS" ] timezone => "UTC" } } else if [type] == "aqmp" { grok { patterns_dir => ["./patterns"] match => { "message" => "%{HAPROXYTCP}" } } date { match => [ "accept_date", "dd/MMM/YYYY:HH:mm:ss.SSS" ] timezone => "UTC" } } } output { #stdout { codec => json } if [type] == "searchWas" { elasticsearch { hosts => ["localhost:9200"] index => "haproxy-access-log-%{+YYYY-MM-dd}" document_type => "searchWas" } } else if [type] == "aqmp" { elasticsearch { hosts => ["localhost:9200"] index => "haproxy-access-log-%{+YYYY-MM-dd}" document_type => "aqmp" } } }
[logconf/test-db.conf] db -> elasticsearch 케이스
input { jdbc { jdbc_driver_library => "/usr/local/tomcat/logstash/lib/mysql-connector-java-5.1.38.jar" jdbc_driver_class => "com.mysql.jdbc.Driver" jdbc_connection_string => "jdbc:mysql://mariadb-dev:3306/log" jdbc_user => "log" jdbc_password => "test" statement => "SELECT * FROM ez_apache_access_log_201602" #schedule => "* * * * *" jdbc_paging_enabled => "true" jdbc_page_size => "50000" } }
[patterns/custom.conf]
ALL_STR .*
test exec
$ bin/logstash -f logconf/apache.conf 192.168.1.100 - - [17/Jul/2016:21:08:51 +0900] "GET /test/list HTTP/1.1" 200 17194 5657 -/- (-%) "http://ddakker.test.com/test/add?test=1" "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; Trident/6.0)"
mappings -> 수동 index/document mapping 생성 예제
$ curl -XPUT http://localhost:9200/log_httpd -d ' { "mappings": { "web01": { "properties": { "req_query_string": { "type": "string", "fields": { "raw": { "type": "string", "index": "not_analyzed" } } }, "req_dt": { "type": "string" }, "referrer_query_string": { "type": "string", "fields": { "raw": { "type": "string", "index": "not_analyzed" } } }, "referrer_domain": { "type": "string", "fields": { "raw": { "type": "string", "index": "not_analyzed" } } }, "req_ext": { "type": "string" }, "referrer_ext": { "type": "string" }, "res_status": { "type": "string" }, "req_mall_type": { "type": "string" }, "@version": { "type": "string" }, "ip": { "type": "string" }, "req_uri": { "type": "string", "fields": { "raw": { "type": "string", "index": "not_analyzed" } } }, "@timestamp": { "format": "strict_date_optional_time||epoch_millis", "type": "date" }, "referrer_mall_type": { "type": "string", "fields": { "raw": { "type": "string", "index": "not_analyzed" } } }, "method": { "type": "string" }, "referrer_uri": { "type": "string", "fields": { "raw": { "type": "string", "index": "not_analyzed" } } } } } } }'
레드마인(redmine) 비밀번호 5회 실패 시 계정잠금.. redmine_login_audit Plugin + 수정하여..
레드마인 사용 중 회사 보안지침 이슈 발생
※. 아래 트리거 사용 했다가 Rails 약간 공부 해서 "Login Audit 플러그인" 조금 수정 해서 해결(2016.06.27)
https://github.com/ddakker/redmine_login_audit
1. 비밀번호 주기적으로 변경
- redmine 3.3.0 으로 업그레이드 하니 기본 기능으로 추가되어 있다.
2. 로그인 시도 중 비밀번호 5회 실패 시 계정 잠금
- https://github.com/jbbarth/redmine_stronger 있었으나 Redmine 3.3.0 지원을 안 하는듯
- ruby 같은 언어를 다뤄본적이 없어서 소스 수정 하려다 포기 (직접 수정하면 업그레이드 문제)
plugin 개발은 현재로서 더 힘들고...
- https://github.com/martin-denizet/redmine_login_audit 기능 활용
로그인 성공/실패 로깅을 해주는 플러그인
해당 플러그인과 db trigger 을 접목해서 해결..
DELIMITER // CREATE TRIGGER login_audits_tri AFTER INSERT ON login_audits FOR EACH ROW BEGIN DECLARE failCnt INT; SET failCnt = ( select count(*) from login_audits where login = NEW.login and success = 0 and created_on >= DATE_FORMAT(DATE_ADD(now(),INTERVAL -5 MINUTE),'%Y-%m-%d %H:%i:%s') and id > ( select max(id) from login_audits where login = NEW.login and success = 1 ) ); IF NEW.success = 0 THEN IF failCnt > 5 THEN update users set `status`=3 where login = NEW.login; END IF; END IF; END; //
Redmine CentOs 에 설치 메모
외부 mysql 활용
yum install epel-release yum groupinstall "Development Tools" yum install openssl-devel readline-devel zlib-devel curl-devel libyaml-devel yum install httpd httpd-devel yum install ImageMagick ImageMagick-devel ## ubuntu desktop 에서 할때 # sudo apt-get install apache2 curl bison libbison-dev zlib1g-dev libssl-dev sqlite3 libsqlite3-dev autoconf automake build-essential libtool libreadline6-dev libyaml-dev libxml2-dev libcurl4-openssl-dev libssl-dev libgpg-error-dev autotools-dev imagemagick libmagickcore-dev libmagickwand-dev wget http://www.ruby-lang.org/ko/downloads/ tar zxvf ruby-2.3.1.tar.gz cd ruby-2.3.1 ./configure make make install gem install bundler --no-rdoc --no-ri wget http://www.redmine.org/releases/redmine-3.3.0.tar.gz tar xvzf redmine-3.3.0.tar.gz mv redmine-3.3.0 /usr/local/. ln -s redmine-3.3.0 redmine cp config/database.yml.example vi config/database.yml bundle install --without development test postgresql sqlite rake generate_secret_token RAILS_ENV=production rake db:migrate RAILS_ENV=production rake redmine:load_default_data #ruby bin/rails server webrick –e production –b 192.168.1.11 bundle exec rails server webrick -e production bundle exec rails server webrick -e production --port=40000 bundle exec rails server webrick -e production --binding=192.168.1.11 --port=40000 # thin 서버로 띄우기 gem install thin vi Gemfile # 하위 추가 gem "thin" bundle install --path vendor/bundle thin start -e production -p 40000 -c /usr/local/redmine --prefix=/redmine curl http://192.168.1.11:40000/ # context path 바꿔보기 cp config/additional_environment.rb additional_environment.rb.example vi config/additional_environment.rb # 하위 추가 config.relative_url_root = '/redmine' thin start -e production -p 40000 -c /usr/local/redmine --prefix=/redmine thin start -d -e production -p 40000 -c /usr/local/redmine --prefix=/redmine curl http://192.168.1.11:40000/redmine # 기존 웹서버 httpd 에 추가하기 ProxyPass /redmine http://192.168.1.11:40000/redmine curl http://dev.ddakker.pe.kr/redmine # 시작/중지/관리 
startup.sh stop.sh rake_plugin.sh # 플러그인 설치 wget https://github.com/peclik/clipboard_image_paste/archive/master.zip unzip master.zip mv clipboard_image_paste-master plugins/clipboard_image_paste RAILS_ENV=production rake redmine:plugins (rake_plugin.sh 만들어 놓음) # 설치된 플러그인 1. 이미지 클립보드 붙여 넣기 - Clipboard image paste 2. 일감 진행현황 그래프 - Progressive Projects List plugin 3. 일감 상황 그래프 - Redmine Graphs plugin 4. 로그인 히스토리 - Redmine Login Audit plugin 5. 일감 작업자별 현황 - Redmine (Monitoring & Controlling | Monitoramento & Controle) # 플러그인 삭제 rake redmine:plugins:migrate NAME=플러그임이름 VERSION=0 RAILS_ENV=production rm -Rf plugins/플러그인이름
Ubuntu 16.04 Desktop 초기 셋팅
[한/영키 설정]
시스템 설정 -> 키보드 -> 바로가기 -> 자판입력 -> 다음입력 소스로전환 Alt 로 변경(Multi_key) 로 바뀜
텍스트 입력창 -> Hangul(Fcitx) 선택 확인 및 다음 소스로 전환 Multi_key 확인
혹시나 한/영키 안되면 터미널
sudo setkeycodes 72 122 엔터
[JDK 1.8 설치]
sudo add-apt-repository ppa:webupd8team/java
sudo apt-get update
sudo apt-get install oracle-java8-installer
java -version
[Eclipse 이슈]
eclipse.ini
--launcher.GTK_version
2
--launcher.appendVmargs -- 이전
런처에 아이콘 물음표라면..
sudo cp 설치/icon.xpm /usr/share/pixmaps/eclipse.xpm
[Unity Tweak Tool]
Ubuntu Software 에서 Unity Tweak Tool 설치
[우분투 런처 위치 이동 및 사이즈 변경]
Unity Tweak Tool -> Launcher -> Appearance -> Bottom 클릭
Icons size = 30
[우분투 Workspace 활성화]
Unity Tweak Tool -> Workspace Setting -> Workspace switcher 켬
Ctrl + Alt + ←, → 테스트
[우분투 Font 사이즈 변경]
Unity Tweak Tool -> Fonts -> 1 포인트 씩 내림 및 Window title Font는 8
Browser User-Agent 정보를 통한 브라우저 분류하기
브라우저 패턴이 너무 다양함...
하는데까지..
SELECT browser, COUNT(browser) AS cnt FROM ( SELECT CASE WHEN user_agent LIKE '%Windows%Chrome%' THEN 'Chrome' WHEN user_agent LIKE '%iPad%Chrome%' THEN 'ChromeIPad' WHEN user_agent LIKE '%iPhone%Chrome%' THEN 'ChromeIPhone' WHEN user_agent LIKE '%Android%Chrome%' THEN 'ChromeAndroid' WHEN user_agent LIKE '%Mac%Chrome%' THEN 'ChromeMacOs' WHEN user_agent LIKE '%Linux%Chrome%' THEN 'ChromeLinux' WHEN user_agent LIKE '%Chrome%' THEN 'ChromeEtc' WHEN user_agent LIKE '%Windows%Safari%' THEN 'Safari' WHEN user_agent LIKE '%iPad%Safari%' THEN 'SafariIPad' WHEN user_agent LIKE '%iPhone%Safari%' THEN 'SafariIPhone' WHEN user_agent LIKE '%Android%Safari%' THEN 'SafariAndroid' WHEN user_agent LIKE '%Mac%Safari%' THEN 'SafariMacOs' WHEN user_agent LIKE '%Linux%Safari%' THEN 'SafariLinux' WHEN user_agent LIKE '%Safari%' THEN 'SafariEtc' WHEN user_agent LIKE '%Windows%Firefox%' THEN 'Firefox' WHEN user_agent LIKE '%iPad%Firefox%' THEN 'FirefoxIPad' WHEN user_agent LIKE '%iPhone%Firefox%' THEN 'FirefoxIPhone' WHEN user_agent LIKE '%Android%Firefox%' THEN 'FirefoxAndroid' WHEN user_agent LIKE '%Mac%Firefox%' THEN 'FirefoxMacOs' WHEN user_agent LIKE '%Linux%Firefox%' THEN 'FirefoxLinux' WHEN user_agent LIKE '%Firefox%' THEN 'FirefoxEtc' WHEN user_agent LIKE '%MSIE 6%' THEN 'IE6' WHEN user_agent LIKE '%MSIE 7%' THEN 'IE7' WHEN user_agent LIKE '%MSIE 8%' THEN 'IE8' WHEN user_agent LIKE '%MSIE 9%' THEN 'IE9' WHEN user_agent LIKE '%MSIE 10%' THEN 'IE10' WHEN user_agent LIKE '%rv:11%' THEN 'IE11' WHEN user_agent LIKE '%iPad%' THEN 'AppIPad' WHEN user_agent LIKE '%iPhone%' THEN 'AppIPhone' WHEN user_agent LIKE '%Android%' THEN 'AppAndroid' WHEN user_conn_info LIKE '%Opera%' THEN 'Opera' ELSE 'EtcBrowser' END browser FROM USER_AGENT_HISTORY WHERE LOGIN_DT >= '20160517000001' AND LOGIN_DT <= '20160517999999' ) AS browsers GROUP BY browser ORDER BY cnt DESC
Javascript 지도 Datamaps kor sample
<script src="<c:url value="/resources/plugin/chart/datamap/d3.min.js" />"></script> <script src="<c:url value="/resources/plugin/chart/datamap/topojson.js" />"></script> <script src="<c:url value="/resources/plugin/chart/datamap/datamaps.kor.min.js" />"></script> <div id="map" style="position: relative; width: 500px; height: 500px; border: 1px red solid"></div> <script> jQuery(document).ready(function(){ var basic = new Datamap({ element : document.getElementById("map"), scope : 'kor', fills : { defaultFill : "#ABDDA4", authorHasTraveledTo : "#fa0fa0", 'colorTest1' : '#EAA9A8', 'colorTest2' : 'red', 'colorTest3' : 'blue' }, setProjection : function(element) { var projection = d3.geo.equirectangular().center([ 0, 0 ]) .rotate([ -128, -36 ]) .scale(5000) .translate([ element.offsetWidth / 2, element.offsetHeight / 2 ]); var path = d3.geo.path().projection(projection); return {path : path, projection : projection}; }, geographyConfig : { highlightBorderColor : '#bada55', popupTemplate : function(geography, data) { return '' + geography.properties.name + ' - Hit:' + data.hit + '' }, highlightBorderWidth : 2 }, data : { "KR.SO" : {"fillKey" : "colorTest1", "hit" : 5}, "KR.KG" : {"fillKey" : "colorTest2", "hit" : 50}, "KR.KN" : {"fillKey" : "colorTest3", "hit" : 100} } }); }); </script>
http://datamaps.github.io
sh(shell script) split 예제
#!/bin/sh SRC_FILE=/home/ci/sample/tomcat/domains/monitor_agent/sample-agent.jar # Tomcat WAS_ARR=( "tomcat@was01;user,admin" "tomcat@was02;user,admin" ) for WAS_INFO in "${WAS_ARR[@]}" do var=$(echo $WAS_INFO | awk -F";" '{print $1,$2}') set -- $var WAS=$1 DOMAIN_ARR_STR=$2 DOMAIN_ARR=$(echo $DOMAIN_ARR_STR | tr "," "\n") echo "ssh -q $WAS rm -Rf /usr/local/tomcat/domains/monitor_agent/sample-agent.jar" ssh -q $WAS rm -Rf /usr/local/tomcat/domains/monitor_agent/sample-agent.jar echo "scp -q -r $SRC_FILE $WAS:/usr/local/tomcat/domains/monitor_agent/." scp -q -r $SRC_FILE $WAS:/usr/local/tomcat/domains/monitor_agent/. for DOMAIN in $DOMAIN_ARR do echo "ssh -q $WAS cd '/usr/local/tomcat/domains/'$DOMAIN';' ./deploy.sh restart wait update 3" ssh -q $WAS cd '/usr/local/tomcat/domains/'$DOMAIN';' ./deploy.sh restart wait update 3 done done