hi,

I try to get a 1KHz sine wave with an ATmega32 and a 10 bit R-2R DAC.

I put 100 sine values in a table and I use a timer so that each time the timer counter overflows, the elapsed duration equals the duration of one of the 100 steps from 0 to 2π.

So an interrupt is executed to change the output to the next of the 100 values in the sine table. After 100 interrupts, the period is complete and it starts over from the beginning of the table.

I get a good sine, but the problem is that I don't get 1 kHz with a timer that should give me 1 kHz. I get something like 420 Hz.

My calculation is:

TCCR0 |= (1 << CS01); //Prescaler = 8

TCNT0 = 256 - 20; // (1/16MHZ ) * 8 * 20 = (1/1000) / 100

// 1/1000 is the period for 1Khz, but the timer has to overflow every period/100 because we calculte 100 sine value over a period

My question is why it is not accurate ?

#define F_CPU 16000000UL #include <avr/io.h> #include <avr/interrupt.h> #include <math.h> /*Global variables declaration*/ int i = 0; float x = 0; double const pi = 3.14159265359; float sine[100]; //Sine value int sineCast[100]; //Sine value converted in integer int main(void) { /*Direction and PORT initialization */ DDRD = 0xFF; //PortD as output, 8 LSBits of the 10 bit DAC PORTD = 0x00; //PortD = LOW DDRB = 0xFF;//PortB as output, 2 MSbits of the 10 bit DAC PORTB = 0x00; //PortB = LOW /*100 sine value (float) put in a table*/ for(i = 0; i < 100; i++) { sine[i] = 512 + (512 * sin(x)); x += 0.0628; // 2pi / 100 = 0.0628 } /*The sine values converted to int*/ for(i = 0; i < 100; i++) { sineCast[i] = (int)sine[i]; } i = 0; TCCR0 |= (1 << CS01); //Prescaler = 8 TCNT0 = 256 - 20; // (1/16MHZ ) * 8 * 20 = (1/1000) / 100 TIMSK |= (1 << TOIE0); sei(); while (1) { } } ISR (TIMER0_OVF_vect) { PORTD = sineCast[i]; PORTB = sineCast[i] >> 8; //PB1 and PB0 (MSBits) active i++; if(i > 99) i = 0; TCNT0 = 256 - 20; TIFR = (1 << TOV0); }