summaryrefslogtreecommitdiffstats
path: root/src/core/arm/mmu/cache.h
blob: d308d9b8748c5874779131c1d16977e9e00d1018 (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
#ifndef _MMU_CACHE_H_
#define _MMU_CACHE_H_

/* A single cache line. The tag word holds the cache-line-aligned virtual
 * address in its high bits and reuses the low bits (below the line-width
 * alignment) as status flags — see the TAG_* defines below. */
typedef struct cache_line_t
{
    ARMword tag;        /*      cache line aligned address, low bits as flags:
                   bit2: last half dirty
                   bit1: first half dirty
                   bit0: cache valid flag
                 */
    ARMword pa;        /*physical address the line was filled from */
    ARMword *data;        /*array of cached data words (one line wide) */
} cache_line_t;
/* Status flags packed into the low bits of cache_line_t.tag.
 * The two half-dirty bits let write-back flush only the modified
 * half of a line. */
#define TAG_VALID_FLAG 0x00000001
#define TAG_FIRST_HALF_DIRTY 0x00000002
#define TAG_LAST_HALF_DIRTY    0x00000004

/* One associativity set: an array of 'way' cache lines plus per-set
 * replacement state. */
typedef struct cache_set_s
{
    cache_line_t *lines;    /* array of cache_s.way lines */
    int cycle;              /* replacement cursor — presumably a round-robin
                               index for line allocation; confirm against
                               mmu_cache_alloc's implementation */
} cache_set_t;

/* Write policy for a cache (cache_s.w_mode). */
enum
{
    CACHE_WRITE_BACK,
    CACHE_WRITE_THROUGH,
};

/* A whole cache: 'set' sets of 'way' lines, each line 'width' bytes.
 * The index/align macros below mask with (set - 1) and (width - 1),
 * so both set and width must be powers of two. */
typedef struct cache_s
{
    int width;        /*bytes in a line */
    int way;        /*ways per set (associativity) */
    int set;        /*num of sets */
    int w_mode;        /*write back or write through (see enum above) */
    //int a_mode;   /*alloc mode: random or round-bin*/
    cache_set_t *sets;
  /**/} cache_s;

/* Static description of a cache geometry, used to construct a cache_s
 * (same field meanings as cache_s). */
typedef struct cache_desc_s
{
    int width;
    int way;
    int set;
    int w_mode;
//      int a_mode;
} cache_desc_t;


/* Address-decomposition helpers. All assume cache_t->width and
 * cache_t->set are powers of two (the mask arithmetic depends on it).
 * Note: the parameter named 'cache_t' is a cache_s*, despite the name. */

/*virtual address to cache set index*/
#define va_cache_set(va, cache_t) \
    (((va) / (cache_t)->width) & ((cache_t)->set - 1))
/*virtual address rounded down to its cache-line boundary*/
#define va_cache_align(va, cache_t) \
        ((va) & ~((cache_t)->width - 1))
/*virtual address to word index within the line (WORD_SHT defined elsewhere)*/
#define va_cache_index(va, cache_t) \
        (((va) & ((cache_t)->width - 1)) >> WORD_SHT)

/*see Page 558 in arm manual*/
/*set/index format value (as used by cache maintenance ops) to set number*/
#define index_cache_set(index, cache_t) \
    (((index) / (cache_t)->width) & ((cache_t)->set - 1))

/*************************cache********************/
/* mmu cache init: allocate and initialize a cache's sets and lines.
 *
 * @cache_t :cache_s to init
 * @width    :cache line width in bytes (power of two)
 * @way        :ways per set
 * @set        :number of sets (power of two)
 * @w_mode    :write policy (CACHE_WRITE_BACK or CACHE_WRITE_THROUGH)
 *
 * $ -1: error
 *      0: success
 */
int
mmu_cache_init (cache_s * cache_t, int width, int way, int set, int w_mode);

/* free a cache_s's inner data; the pointer itself is not freed.
 * If the cache_s was heap-allocated, do:
 *         mmu_cache_exit(cache);
 *         free(cache);
 *
 * @cache_t : the cache_s whose internals to free
 */
void mmu_cache_exit (cache_s * cache_t);

/* mmu cache search: look up a virtual address.
 *
 * @state    :ARMul_State
 * @cache_t    :cache_s to search
 * @va        :virtual address
 *
 * $    NULL:    no cache line matches
 *         non-NULL: the matching cache line
 * */
cache_line_t *mmu_cache_search (ARMul_State * state, cache_s * cache_t,
                ARMword va);

/* mmu cache search by set/index format value (used by cache
 * maintenance operations — see index_cache_set above).
 *
 * @state    :ARMul_State
 * @cache_t    :cache_s to search
 * @index       :set/index format value
 *
 * $    NULL:    no cache line matches
 *         non-NULL: the matching cache line
 * */

cache_line_t *mmu_cache_search_by_index (ARMul_State * state,
                     cache_s * cache_t, ARMword index);

/* mmu cache alloc: claim a line for va (evicting/writing back as needed —
 * confirm eviction policy against the implementation).
 *
 * @state :ARMul_State
 * @cache_t    :cache_s to alloc from
 * @va        :virtual address needing a line; need not be line-aligned
 * @pa        :physical address of va
 *
 * $    the allocated line; allocation always succeeds
 */
cache_line_t *mmu_cache_alloc (ARMul_State * state, cache_s * cache_t,
                   ARMword va, ARMword pa);

/* mmu_cache_write_back: write a line's data back to memory.
 *
 * @state:
 * @cache_t :cache_s owning the line
 * @cache : the cache line to write back
 */
void
mmu_cache_write_back (ARMul_State * state, cache_s * cache_t,
              cache_line_t * cache);

/* mmu_cache_clean: clean (write back dirty data for) the line holding va.
 *
 * @state    :ARMul_State
 * @cache_t    :cache_s to clean
 * @va        :virtual address
 */
void mmu_cache_clean (ARMul_State * state, cache_s * cache_t, ARMword va);
/* As mmu_cache_clean, but addressed by set/index format value. */
void
mmu_cache_clean_by_index (ARMul_State * state, cache_s * cache_t,
              ARMword index);

/* mmu_cache_invalidate: invalidate the line holding va.
 *
 * @state    :ARMul_State
 * @cache_t    :cache_s to invalidate in
 * @va        :virtual address to invalidate
 */
void
mmu_cache_invalidate (ARMul_State * state, cache_s * cache_t, ARMword va);

/* As mmu_cache_invalidate, but addressed by set/index format value. */
void
mmu_cache_invalidate_by_index (ARMul_State * state, cache_s * cache_t,
                   ARMword index);

/* Invalidate every line in the cache. */
void mmu_cache_invalidate_all (ARMul_State * state, cache_s * cache_t);

/* Flush lines matching physical address pa (software-initiated flush). */
void
mmu_cache_soft_flush (ARMul_State * state, cache_s * cache_t, ARMword pa);

/* Return a dirty line from the cache, or NULL if none — presumably used
 * to drain dirty data; confirm against the implementation. */
cache_line_t* mmu_cache_dirty_cache(ARMul_State * state, cache_s * cache_t);

#endif /*_MMU_CACHE_H_*/