extend score log
parent a2b802a2b1
commit 79fed14337

2 changed files with 191 additions and 684 deletions

foo.py (57 changes)
@@ -5,6 +5,7 @@ from dataclasses import dataclass
 from math import floor
 from datetime import datetime
 
+
 def _add_accumulated_score(df: pandas.DataFrame):
     acc_col = pandas.Series([0.0]).repeat(len(df)).reset_index(drop=True)
 
@@ -15,13 +16,36 @@ def _add_accumulated_score(df: pandas.DataFrame):
 
     df['accumulated_score'] = acc_col
 
 
 def load_score_log(path: str) -> pandas.DataFrame:
-    scores = pandas.read_csv(path, sep=',',
+    return pandas.read_csv(path, sep=',',
                            dtype={'score': int, 'sourcename': str, 'name': str, 'mapx': int, 'mapy': int},
                            parse_dates=['when'], date_format='%d/%m/%Y %H:%M')
+
+
+def _calc_duration(row) -> int:
+    score_per = None
+    if row['sourcename'] == 'Capture':
+        score_per = 1.0
+    elif row['sourcename'] == 'Output Boost':
+        score_per = 0.1
+    else:
+        return 0
+
+    return int(floor(row['score'] / score_per))
+
+
+def _calc_event_start(row) -> pandas.Timestamp:
+    return pandas.Timestamp(row['when'].timestamp() - row['seconds'], unit='s')
+
+
+def extend_score_log(scores: pandas.DataFrame):
     scores.sort_values('when', inplace=True)
     _add_accumulated_score(scores)
-    return scores
+
+    scores['seconds'] = scores.apply(_calc_duration, axis=1)
+    scores['when_start'] = scores.apply(_calc_event_start, axis=1)
 
 
 def generate_station_stats(score_log: pandas.DataFrame) -> pandas.DataFrame:
     station_count = len(score_log['name'].unique())
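Not part of the commit: a minimal sketch of how the new per-row helpers behave, assuming foo.py is importable as `foo`; the row values below are made up. Capture accrues 1.0 point per second and Output Boost 0.1, so _calc_duration turns a score into a duration in seconds, _calc_event_start shifts the logged timestamp back by that duration, and any other sourcename gets 0 seconds and is later treated as a one-off event.

    import pandas
    from foo import _calc_duration, _calc_event_start

    # One made-up 'Capture' event worth 90 points, logged at 10:00:00.
    df = pandas.DataFrame([{'sourcename': 'Capture', 'score': 90,
                            'when': pandas.Timestamp('2024-01-01 10:00:00')}])
    df['seconds'] = df.apply(_calc_duration, axis=1)        # 90 / 1.0 -> 90 seconds
    df['when_start'] = df.apply(_calc_event_start, axis=1)  # 2024-01-01 09:58:30
    print(df[['seconds', 'when_start']])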
@@ -32,7 +56,7 @@ def generate_station_stats(score_log: pandas.DataFrame) -> pandas.DataFrame:
 
     assert len(summary) == station_count
 
-    common_join_args = {'on':'name', 'how':'left', 'validate': '1:1'}
+    common_join_args = {'on': 'name', 'how': 'left', 'validate': '1:1'}
 
     # add total score
     summary = pandas.merge(summary, score_log[['name', 'score']].groupby('name').sum(), **common_join_args)
@@ -55,7 +79,8 @@ def generate_station_stats(score_log: pandas.DataFrame) -> pandas.DataFrame:
     summary = pandas.merge(summary, max_boosts, **common_join_args)
     assert len(summary) == station_count
 
-    visits = score_log[(score_log['sourcename'] == 'Visit') | (score_log['sourcename'] == 'First Visit')][['name', 'score']].groupby('name')
+    visits = score_log[(score_log['sourcename'] == 'Visit') | (score_log['sourcename'] == 'First Visit')][
+        ['name', 'score']].groupby('name')
 
     # add total visits (count)
     summary = pandas.merge(summary, visits.count(), **common_join_args)
@@ -81,6 +106,7 @@ def generate_station_stats(score_log: pandas.DataFrame) -> pandas.DataFrame:
     assert len(summary) == station_count
     return summary
 
+
 def generate_score_per_second(score_log: pandas.DataFrame) -> pandas.DataFrame:
     @dataclass
     class ScoreSecond:
@@ -89,27 +115,24 @@ def generate_score_per_second(score_log: pandas.DataFrame) -> pandas.DataFrame:
         when: datetime
         score: float
         once: bool
+        event_start: bool
         mapx: int
         mapy: int
 
-    def row_to_scoreseconds(row, score_per) -> typing.Iterator[ScoreSecond]:
-        seconds = int(floor(row.score / score_per))
-        assert row.score % score_per < 0.1
-        when = int(floor(row.when.timestamp()))
-        for elapsed in range(0, seconds):
-            timestamp = pandas.Timestamp(when - elapsed, unit='s')
+    def row_to_scoreseconds(row) -> typing.Iterator[ScoreSecond]:
+        score_per = row.score / row.seconds
+        for elapsed in range(0, row.seconds):
+            timestamp = pandas.Timestamp(row.when_start.timestamp() + elapsed, unit='s')
             yield ScoreSecond(name=row.name, sourcename=row.sourcename, mapx=row.mapx, mapy=row.mapy, when=timestamp,
-                              score=score_per, once=False)
+                              score=score_per, once=False, event_start=(elapsed == 0))
 
     def gen_scoreseconds() -> typing.Iterator[ScoreSecond]:
         for row in score_log.itertuples():
-            if row.sourcename == "Capture":
-                yield from row_to_scoreseconds(row, 1.0)
-            elif row.sourcename == "Output Boost":
-                yield from row_to_scoreseconds(row, 0.1)
-            else:  # one-off
+            if row.seconds == 0:  # one-off
                 yield ScoreSecond(name=row.name, sourcename=row.sourcename, mapx=row.mapx, mapy=row.mapy, when=row.when,
-                                  score=row.score, once=True)
+                                  score=row.score, once=True, event_start=True)
+            else:
+                yield from row_to_scoreseconds(row)
 
     scoreseconds = pandas.DataFrame(gen_scoreseconds())
     scoreseconds.sort_values(by=['when'], inplace=True)
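Not part of the commit: a rough end-to-end sketch of the new split of responsibilities, again assuming foo.py is importable as `foo` and using a made-up CSV path. load_score_log now only reads the CSV; extend_score_log mutates the frame in place (sort, accumulated_score, seconds, when_start); generate_score_per_second then uses seconds/when_start to expand each timed event into one ScoreSecond row per second, with event_start set only on the first of them.

    from foo import (load_score_log, extend_score_log,
                     generate_station_stats, generate_score_per_second)

    scores = load_score_log('score_log.csv')        # made-up path; raw frame from read_csv
    extend_score_log(scores)                        # in place: sort, accumulated_score, seconds, when_start
    per_second = generate_score_per_second(scores)  # one row per elapsed second, event_start on the first
    stations = generate_station_stats(scores)       # per-station summary joined on 'name'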
							
								
								
									
notebook.ipynb (818 changes)

File diff suppressed because one or more lines are too long