Hadoop output is weird

I'm playing with Hadoop for the first time, and I wrote a MapReduce job to take a log file and chunk it down. However, the output is... weird.
It looks like this:
2f09 3133 3134 3838 0a2f 0009 3137 0a2f
0000 1908 efbf bd7b efbf bd44 11ef bfbd
efbf bd2a efbf bdef bfbd 301b 79ef bfbd
5bef bfbd d290 efbf bdef bfbd 5349 efbf
bd5c efbf bd24 32ef bfbd 7bef bfbd 58ef
bfbd efbf bd16 efbf bdef bfbd 20ef bfbd
52ef bfbd 1fd7 ac1b efbf bd21 672b df86
3603 031a 54ef bfbd efbf bd09 310a 2f00
002b efbf bd53 53ef bfbd 2bef bfbd efbf
bd63 6125 efbf bdef bfbd 3c17 024e 4eef
bfbd efbf bd1d 7e72 efbf bd18 efbf bd4b
2332 efbf bdef bfbd 04ef bfbd 1d19 efbf
bd67 5a33 3270 7bef bfbd 75ef bfbd 6def
bfbd 0931 0a2f 0000 46ef bfbd ddb5 efbf
bd4d 62ef bfbd 7751 2048 efbf bdef bfbd
14ef bfbd efbf bdef bfbd 5463 efbf bdef
bfbd 5f12 efbf bdef bfbd 77ef bfbd 5fef
bfbd efbf bdef bfbd 32ef bfbd dd88 efbf
bdd8 b309 310a 2f00 0072 ccbd 0931 0a2f
0000 7457 efbf bdef bfbd 1632 efbf bdef
bfbd 21ef bfbd efbf bdef bfbd 563d 66ef
I did try it with a much smaller file originally, and it came out just fine in readable format, so I'm not entirely sure what the problem is. Did the encoding change at some point while the file was being MapReduced?
I have absolutely no idea, so I'd appreciate some help figuring out what's wrong, what I can do to fix it, and how to prevent it from happening again.
Thanks!
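For what it's worth, the recurring "efbf bd" byte sequence in the dump is the UTF-8 encoding of U+FFFD, the Unicode replacement character, which typically shows up when bytes were decoded with the wrong charset somewhere along the way. A minimal, self-contained sketch to confirm this (the byte values are copied from the dump above):

import java.nio.charset.StandardCharsets;

public class DecodeDump {
    public static void main(String[] args) {
        // bytes from the second dumped line: ef bf bd 7b
        byte[] bytes = { (byte) 0xef, (byte) 0xbf, (byte) 0xbd, 0x7b };
        String decoded = new String(bytes, StandardCharsets.UTF_8);
        decoded.codePoints().forEach(cp ->
                System.out.printf("U+%04X %s%n", cp, Character.getName(cp)));
        // prints: U+FFFD REPLACEMENT CHARACTER, then U+007B LEFT CURLY BRACKET
    }
}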
Edit: Added code
Thankfully, it's a nice short file since it's my first one ever... I've removed the imports and stuff to try to make it even shorter.
public class WCSLogParse {

    public static class Map extends Mapper<LongWritable, Text, Text, IntWritable> {
        private final static IntWritable one = new IntWritable(1);
        private static final Log LOG = LogFactory.getLog(WCSLogParse.class);

        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();
            line = splitToUrl(line);
            LOG.info("Line is " + line);
            if (line.contains(".")) {
                // do nothing
                LOG.info("Skipping line");
            }
            else {
                int lastSlash = line.lastIndexOf("/");
                line = line.substring(lastSlash);
                LOG.info("Command is " + line);
                context.write(new Text(line), one);
            }
        }

        private String splitToUrl(String line) {
            int subBegin = line.indexOf('/');
            int subEnd = line.indexOf(',', subBegin);
            if (subBegin == -1 || subEnd == -1) {
                return ".";
            }
            String url = line.substring(subBegin, subEnd);
            // handles if it is from a CSV field
            if (url.endsWith("\"")) {
                url = url.substring(0, url.length() - 1);
            }
            return url;
        }

        private String getUrl(String line) {
            String[] cols = line.split(",");
            return cols[7];
        }
    }

    public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable> {
        public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            context.write(key, new IntWritable(sum));
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        // args.toString() would print the array reference, not its contents
        System.out.println("the args are " + java.util.Arrays.toString(args));
        Job job = new Job(conf, "WCSLogParse");
        job.setJarByClass(WCSLogParse.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        job.setMapperClass(Map.class);
        job.setCombinerClass(Reduce.class);
        job.setReducerClass(Reduce.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        job.waitForCompletion(true);
    }
}
I'm kicking the job off in Eclipse with the following arguments:
"/Volumes/Secondary Documents/logs/" "/Volumes/Secondary Documents/logs/output"

Related

java.lang.NullPointerException in pagination with streams

I am using streams to paginate a list of offers in Spring Boot.
Here's my Page class:
@Table(name = "PAGE")
@Data
@NoArgsConstructor
public class Page<T> {
    // current page
    private int pageNumber;
    // number of results displayed on each page
    private int resultsPerPage;
    // total number of results
    private int totalResults;
    // items to display on the current page
    private List<T> items;

    public Page(int pageNumber, int resultsPerPage, int totalResults, List<T> items) {
        this.pageNumber = pageNumber;
        this.resultsPerPage = resultsPerPage;
        this.totalResults = totalResults;
        this.items = items;
    }
}
Here's my service:
@Override
public Page<JobOfferEntity> getAllJobOffersByPage(List<JobOfferEntity> jobOffers, int pageNumber, int resultsPerPage) {
    int skipCount = (pageNumber - 1) * resultsPerPage;
    int totalResult = jobOffers.size();
    List<JobOfferEntity> jobOfferPage = jobOffers
            .stream()
            .skip(skipCount)
            .limit(resultsPerPage)
            .collect(Collectors.toList());
    Page<JobOfferEntity> page = new Page<JobOfferEntity>(pageNumber, resultsPerPage, totalResult, jobOfferPage);
    return page;
}
Here's my controller:
@GetMapping(path = "/pagination")
public Page<JobOfferEntity> getAllByPage(@RequestParam(defaultValue = "1") int pageNumber,
        @RequestParam(defaultValue = "8") int resultsPerPage) {
    List<JobOfferEntity> jobOffers = this.jobOfferRepository.findAll();
    return this.jobOfferService.getAllJobOffersByPage(jobOffers, pageNumber, resultsPerPage);
}
Yet I keep getting this error: java.lang.NullPointerException: null
Can anyone tell me why?
Here's the relevant part of the stack trace:
java.lang.NullPointerException
    at com.polaris.controllers.recruitment.jobOffer.JobOfferController.getAllByPage(JobOfferController.java:54)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.base/java.lang.reflect.Method.invoke(Method.java:566)
    at org.springframework.web.method.support.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:190)
    at org.springframework.web.method.support.InvocableHandlerMethod.invokeForRequest(InvocableHandlerMethod.java:138)
    at org.springframework.web.servlet.mvc.method.annotation.ServletInvocableHandlerMethod.invokeAndHandle(ServletInvocableHandlerMethod.java:105)
    at org.springframework.web.servlet.mvc.method.annotation.RequestMappingHandlerAdapter.invokeHandlerMethod(RequestMappingHandlerAdapter.java:878)
    at org.springframework.web.servlet.DispatcherServlet.doDispatch(DispatcherServlet.java:1040)
    ... (the remaining frames are Spring MVC, Spring Security, Keycloak adapter, and Tomcat filter-chain internals)
I am also using Keycloak, in case that's relevant.
The problem was fixed: I only had to add @Autowired to the jobOfferRepository in the controller:
@Autowired
private JobOfferRepository jobOfferRepository;
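As a side note, constructor injection is a common alternative to field injection that fails at startup (rather than with an NPE at request time) when the bean cannot be supplied. A minimal sketch, assuming a plain @RestController and reusing the names from the code above:

@RestController
public class JobOfferController {

    private final JobOfferRepository jobOfferRepository;

    // with a single constructor, Spring injects the repository
    // automatically; no field-level @Autowired is needed
    public JobOfferController(JobOfferRepository jobOfferRepository) {
        this.jobOfferRepository = jobOfferRepository;
    }
}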

Sort the output in Hadoop MapReduce

I am a beginner with Hadoop, and I want to read a text file through MapReduce and output it. I have set up a counter to see the order of the data, but the output is not in order. Here are my code and a screenshot.
Question: How can we sort the output based on the value of the key?
Sample input data in text file:
199907 21 22 23 24 25
199808 26 27 28 29 30
199909 31 32 33 34 35
200010 36 37 38 39 40
200411 41 42 43 44 45
Mapper
public static class TestMapper
        extends Mapper<LongWritable, Text, Text, Text> {
    int days = 1;

    @Override
    public void map(LongWritable key, Text value, Context context
            ) throws IOException, InterruptedException {
        /* get the file name */
        FileSplit fileSplit = (FileSplit) context.getInputSplit();
        String filename = fileSplit.getPath().getName();
        //context.write(new Text(filename), new Text(""));
        StringTokenizer token = new StringTokenizer(value.toString());
        String yearMonth = token.nextToken();
        if (Integer.parseInt(yearMonth) == 0)
            return;
        while (token.hasMoreTokens()) {
            context.write(new Text(yearMonth + " " + days), new Text(token.nextToken()));
        }
        days++;
    }
}
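For the first sample line above, this mapper emits the key "199907 1" five times, once per value 21 through 25; days is incremented once per call to map, i.e., once per input line, so it effectively numbers the lines seen by each map task.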
Reducer
public static class TestReducer
        extends Reducer<Text, Text, Text, Text> {
    @Override
    public void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        ArrayList<String> valList = new ArrayList<String>();
        for (Text val : values)
            valList.add(val.toString());
        context.write(key, new Text(valList.toString()));
    }
}
Driver/Main class
public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length < 2) {
        System.err.println("Usage: Class name <in> [<in>...] <out>");
        System.exit(2);
    }
    Job job = Job.getInstance(conf, "My Class");
    job.addFileToClassPath(new Path("/myPath"));
    job.setJarByClass(myJar.class);
    job.setMapperClass(TestMapper.class);
    job.setReducerClass(TestReducer.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Text.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    FileInputFormat.setInputDirRecursive(job, true);
    for (int i = 0; i < otherArgs.length - 1; ++i) {
        FileInputFormat.addInputPath(job, new Path(otherArgs[i]));
    }
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[otherArgs.length - 1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}
[Screenshot of part of the output]
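One possible direction (a sketch, not tested against this exact job): Hadoop sorts map output keys with the job's sort comparator, and the default comparison of Text keys is lexicographic, so keys like "199907 1" can be ordered numerically with a custom comparator registered via job.setSortComparatorClass(NumericKeyComparator.class):

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;

public class NumericKeyComparator extends WritableComparator {
    public NumericKeyComparator() {
        super(Text.class, true); // instantiate Text keys for compare()
    }

    @Override
    public int compare(WritableComparable a, WritableComparable b) {
        // keys look like "199907 1": compare yearMonth, then day, numerically
        String[] x = a.toString().split(" ");
        String[] y = b.toString().split(" ");
        int byYearMonth = Integer.compare(Integer.parseInt(x[0]), Integer.parseInt(y[0]));
        if (byYearMonth != 0) return byYearMonth;
        return Integer.compare(Integer.parseInt(x[1]), Integer.parseInt(y[1]));
    }
}

Note that with more than one reducer, each part file is only sorted within itself.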

How do I change all the Switches in a ListView to true?

I have a page with a ListView, and each row has a Switch so the user can select which records he wants. I then created a main Switch which, when the user toggles it, should set all the other Switches in the ListView to true or false.
How can I do this?
XAML
<Switch x:Name="IsSelectAll" Toggled="OnChangeSelectAll" IsToggled="False" HorizontalOptions="EndAndExpand" ></Switch>
<ListView x:Name="GrupoDeProdutos"
          SeparatorVisibility="Default">
    <ListView.ItemTemplate>
        <DataTemplate>
            <ViewCell>
                <StackLayout Orientation="Horizontal">
                    <StackLayout HorizontalOptions="StartAndExpand">
                        <Label Text="{Binding title}" HorizontalOptions="StartAndExpand" TextColor="Default"></Label>
                    </StackLayout>
                    <Switch x:Name="{Binding id}" IsToggled="{Binding active}"></Switch>
                </StackLayout>
            </ViewCell>
        </DataTemplate>
    </ListView.ItemTemplate>
</ListView>
Controller
public partial class PopUpGrupoProdutoUsuario : PopupPage {
    private List<TableModel> listaModel = new List<TableModel>();

    public PopUpGrupoProdutoUsuario()
    {
        InitializeComponent();
    }

    // select-all switch true/false
    private void OnChangeSelectAll(object sender, ToggledEventArgs args) {
        foreach (TableModel t in this.listaModel) {
            t.active = args.Value;
        }
    }

    // class listview model
    public class TableModel {
        public int id { get; set; }
        public String title { get; set; }
        public Boolean active { get; set; }

        public TableModel() { }

        public TableModel(int id, String title, Boolean active) {
            this.id = id;
            this.title = title;
            this.active = active;
        }
    }
}
EDIT
// class listview model
public class TableModel : INotifyPropertyChanged {
    public event PropertyChangedEventHandler PropertyChanged;

    public int id
    {
        get { return id; }
        set
        {
            if (id != value)
            {
                id = value;
                NotifyPropertyChanged();
            }
        }
    }

    public String title
    {
        get { return title; }
        set
        {
            if (title != value)
            {
                title = value;
                NotifyPropertyChanged();
            }
        }
    }

    public Boolean ativo
    {
        get { return ativo; }
        set {
            if (ativo != value) {
                ativo = value;
                NotifyPropertyChanged();
            }
        }
    }

    public TableModel() {
        if (DesignMode.IsDesignModeEnabled)
        {
            this.id = 0;
            this.title = "default";
            this.ativo = false;
        }
    }

    public TableModel(int id, String title, Boolean ativo) {
        if (DesignMode.IsDesignModeEnabled) {
            this.id = id;
            this.title = title;
            this.ativo = ativo;
        }
    }

    protected virtual void NotifyPropertyChanged([CallerMemberName] string propertyName = "") {
        PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(propertyName));
    }
}
Error
=================================================================
Native Crash Reporting
=================================================================
Got a SIGSEGV while executing native code. This usually indicates
a fatal error in the mono runtime or one of the native libraries
used by your application.
=================================================================
04-22 11:34:59.495 E/mono-rt (20272): /proc/self/maps:
=================================================================
Basic Fault Adddress Reporting
04-22 11:34:59.495 E/mono-rt (20272): 12c00000-32c00000 rw-p 00000000 00:01 10738 /dev/ashmem/dalvik-main space (region space) (deleted)
04-22 11:34:59.495 E/mono-rt (20272): 5af61000-5af65000 r-xp 00000000 08:06 331 /system/bin/app_process32
04-22 11:34:59.495 E/mono-rt (20272): 5af66000-5af67000 r--p 00004000 08:06 331 /system/bin/app_process32
04-22 11:34:59.495 E/mono-rt (20272): 5af67000-5af68000 rw-p 00000000 00:00 0
04-22 11:34:59.495 E/mono-rt (20272): 6f101000-6f2e7000 rw-p 00000000 08:13 105905 /data/dalvik-cache/x86/system#framework#boot.art
04-22 11:34:59.495 E/mono-rt (20272): 6f2e7000-6f3a4000 rw-p 00000000 08:13 105906 /data/dalvik-cache/x86/system#framework#boot-core-libart.art
04-22 11:34:59.495 E/mono-rt (20272): 6f3a4000-6f3e1000 rw-p 00000000 08:13 105907 /data/dalvik-cache/x86/system#framework#boot-conscrypt.art
=================================================================
Memory around native instruction pointer (0xc318029f):
0xc318028f 8b 4d f8 8d 64 24 00 90 90 90 8b 45 08 89 04 24 .M..d$.....E...$
0xc318029f e8 b4 ff ff ff 89 45 f0 8b 4d f8 8d 64 24 00 90 ......E..M..d$..
0xc31802af 90 90 8b 45 f0 8b f0 8b 4d f8 8d 64 24 00 90 90 ...E....M..d$...
0xc31802bf 90 8b c6 8b 4d f8 8d 64 24 00 90 90 90 8d 65 fc ....M..d$.....e.
No native Android stacktrace (see debuggerd output).
=================================================================
Managed Stacktrace:
=================================================================
at TableModel:get_title <0x00047>
at TableModel:get_title <0x0004b>
at TableModel:get_title <0x0004b>
at TableModel:get_title <0x0004b>
at TableModel:get_title <0x0004b>
at TableModel:get_title <0x0004b>
at TableModel:get_title <0x0004b>
at TableModel:get_title <0x0004b>
at TableModel:get_title <0x0004b>
at TableModel:get_title <0x0004b>
at TableMod
Your code ends in infinite recursion, and your properties are the reason: each getter returns the property itself, so it keeps calling itself (see the repeated get_title frames in the managed stack trace). Create a private member "title" and a public property "Title" backed by it. That should do the trick.

Want to read odd records using MapReduce

I have a use case where I have to read the odd lines from a file using Java MapReduce. As far as I can tell, the InputFormat class only treats '\n' as the line terminator, but I want to read the input as follows:
INPUT:
sampat
kumar
hadoop
mapredue
OUTPUT:
sampat
hadoop
You can also achieve the desired output this way (no need to write a custom InputFormat/OutputFormat):
Input:
sampat1 kumar2 hadoop3 mapredue4 sampat1 kumar2 hadoop3 mapredue4 sampat1 kumar2 hadoop3 mapredue4 sampat1 kumar2 hadoop3 mapredue4 sampat1 kumar2 hadoop3 mapredue4
Output:
sampat1 hadoop3 sampat1 hadoop3 sampat1 hadoop3 sampat1 hadoop3 sampat1 hadoop3
Code:
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class OddLine {

    public static class OddLineMapper extends Mapper<Object, Text, Text, Text> {
        @Override
        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            // build the output per input line; a local builder avoids
            // accumulating tokens across map() calls
            StringBuilder sb = new StringBuilder();
            String[] lines = value.toString().split(" ");
            for (int i = 0; i < lines.length; i += 2)
                sb.append(lines[i] + " ");
            context.write(new Text(""), new Text(sb.toString()));
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "Get odd words");
        job.setJarByClass(OddLine.class);
        job.setMapperClass(OddLineMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        // delete the output directory if it already exists
        FileSystem fs = null;
        Path dstFilePath = new Path(args[1]);
        try {
            fs = dstFilePath.getFileSystem(conf);
            if (fs.exists(dstFilePath))
                fs.delete(dstFilePath, true);
        } catch (IOException e1) {
            e1.printStackTrace();
        }
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
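Assuming the class above is packaged into a jar (the jar name here is just a placeholder), the job can be launched in the usual way:

hadoop jar oddline.jar OddLine /path/to/input /path/to/output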

First Mapper produces output from another block (file) and the first part files generated hold data from block 2

My first mapper produces output from another block (file): the part files that come out contain data from both input blocks, as shown below. Why?
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class MainMaxTemp {
    public static void main(String[] args) throws Exception {
        if (args.length != 2) {
            System.err.println("Usage: MaxTemperature <input path> <output path>");
            System.exit(-1);
        }
        Job job = new Job();
        job.setNumReduceTasks(2);
        job.setJarByClass(MainMaxTemp.class);
        job.setJobName("Max temperature");
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        job.setMapperClass(WeatherData.class);
        // job.setReducerClass(MaxTemperatureReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class WeatherData extends Mapper<LongWritable, Text, Text, IntWritable> {
    private static final int MISSING = 9999;

    @Override
    public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        String line = value.toString();
        String[] words = line.split(" ");
        String year = words[0];
        int airTemperature;
        airTemperature = Integer.parseInt(words[1]);
        context.write(new Text(year), new IntWritable(airTemperature));
    }
}
Input file 1
1901 23
1902 67
1902 89
1901 101
1889 -90
1700 189
1901 -9
Input file 2
2901 23
2902 67
2902 89
2901 101
2889 -90
2700 189
2901 -9
Part file 1
1700 189
1889 -90
1902 89
1902 67
2901 -9
2901 101
2901 23
Part file 2
1901 -9
1901 101
1901 23
2700 189
2889 -90
2902 89
2902 67
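This mixing is actually the expected behavior with two reduce tasks: map output is routed to reducers by key, not by the input file or block a record came from, so each part file (one per reducer) ends up with keys from both inputs. A minimal sketch of what Hadoop's default HashPartitioner does, shown purely for illustration with the key/value types used above:

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Partitioner;

// equivalent to Hadoop's default HashPartitioner: each key goes to the
// reducer numbered hash(key) mod numReduceTasks, regardless of source block
public class IllustrativePartitioner extends Partitioner<Text, IntWritable> {
    @Override
    public int getPartition(Text key, IntWritable value, int numReduceTasks) {
        return (key.hashCode() & Integer.MAX_VALUE) % numReduceTasks;
    }
}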
